From 4eb0db377cea717a8b7c00d639dd048e597f1466 Mon Sep 17 00:00:00 2001 From: Roy Razon Date: Sun, 3 Dec 2023 16:21:12 +0200 Subject: [PATCH] local build (#340) See https://preevy.dev/recipes/faster-build --- README.md | 9 +- package.json | 2 +- packages/cli-common/package.json | 6 +- .../cli-common/src/commands/base-command.ts | 27 +- .../cli-common/src/hooks/init/load-plugins.ts | 1 + packages/cli-common/src/index.ts | 6 +- .../src/lib/common-flags/build-flags.ts | 69 +++ .../index.ts} | 30 +- packages/cli-common/src/lib/flags.ts | 3 - packages/cli/bin/dev | 1 + packages/cli/bin/run | 1 + packages/cli/package.json | 7 +- packages/cli/src/commands/down.ts | 2 +- packages/cli/src/commands/logs.ts | 38 +- packages/cli/src/commands/ls.ts | 5 +- packages/cli/src/commands/proxy/connect.ts | 8 +- packages/cli/src/commands/proxy/disconnect.ts | 7 +- packages/cli/src/commands/proxy/urls.ts | 6 +- packages/cli/src/commands/purge.ts | 2 +- packages/cli/src/commands/shell.ts | 5 +- packages/cli/src/commands/up.ts | 141 ++++-- packages/cli/src/commands/urls.ts | 6 +- packages/cli/src/commands/version.ts | 2 +- packages/cli/src/driver-command.ts | 2 +- packages/cli/src/hooks/init/telemetry.ts | 2 +- packages/cli/tsconfig.json | 2 +- packages/common/package.json | 6 +- packages/compose-tunnel-agent/package.json | 6 +- packages/core/package.json | 6 +- packages/core/src/build.test.ts | 413 ++++++++++++++++++ packages/core/src/build.ts | 113 +++++ packages/core/src/closable.ts | 17 - packages/core/src/commands/build.ts | 58 +++ packages/core/src/commands/index.ts | 7 +- packages/core/src/commands/model.ts | 80 ++++ packages/core/src/commands/up.ts | 163 +++++++ packages/core/src/commands/up/index.ts | 183 -------- .../core/src/compose-tunnel-agent-client.ts | 7 +- packages/core/src/compose/client.ts | 19 +- packages/core/src/compose/model.ts | 106 +---- packages/core/src/compose/remote.ts | 215 ++++++++- .../core/src/compose/script-injection.test.ts | 27 +- packages/core/src/compose/script-injection.ts | 9 +- packages/core/src/compose/service-links.ts | 6 + packages/core/src/docker.ts | 53 ++- packages/core/src/driver/driver.ts | 17 +- packages/core/src/driver/index.ts | 3 +- .../driver/{machine.ts => machine-model.ts} | 0 .../machine-operations.ts} | 47 +- packages/core/src/driver/ssh.ts | 6 +- packages/core/src/env-id.ts | 2 +- packages/core/src/git.ts | 4 +- packages/core/src/index.ts | 7 +- packages/core/src/nulls.ts | 6 + packages/core/src/remote-files.ts | 16 + packages/core/src/ssh/client/forward-out.ts | 5 +- packages/core/src/ssh/client/index.ts | 2 +- packages/core/src/ssh/client/sftp.ts | 2 +- packages/core/src/telemetry/emitter.ts | 9 +- packages/core/src/timing.ts | 6 + packages/core/src/tunneling/model.ts | 3 +- packages/core/tsconfig.json | 2 +- packages/driver-azure/package.json | 6 +- packages/driver-gce/package.json | 6 +- packages/driver-kube-pod/package.json | 6 +- .../src/driver/client/port-forward.ts | 5 +- packages/driver-kube-pod/src/driver/driver.ts | 11 +- packages/driver-lightsail/package.json | 6 +- packages/plugin-github/README.md | 19 +- packages/plugin-github/package.json | 6 +- .../src/commands/github/pr/comment.ts | 2 +- .../src/commands/github/pr/uncomment.ts | 2 +- packages/plugin-github/src/flags.ts | 19 +- packages/plugin-github/src/hooks.ts | 61 ++- packages/plugin-github/src/index.ts | 7 +- site/docs/intro/under-the-hood.md | 28 +- site/docs/recipes/faster-build.md | 368 ++++++++++++++++ site/docs/recipes/service-discovery.md | 2 +- site/docs/roadmap.md | 4 - 
tunnel-server/package.json | 6 +- yarn.lock | 163 ++++--- 81 files changed, 2092 insertions(+), 656 deletions(-) create mode 100644 packages/cli-common/src/lib/common-flags/build-flags.ts rename packages/cli-common/src/lib/{common-flags.ts => common-flags/index.ts} (64%) create mode 100644 packages/core/src/build.test.ts create mode 100644 packages/core/src/build.ts delete mode 100644 packages/core/src/closable.ts create mode 100644 packages/core/src/commands/build.ts create mode 100644 packages/core/src/commands/model.ts create mode 100644 packages/core/src/commands/up.ts delete mode 100644 packages/core/src/commands/up/index.ts create mode 100644 packages/core/src/compose/service-links.ts rename packages/core/src/driver/{machine.ts => machine-model.ts} (100%) rename packages/core/src/{commands/up/machine.ts => driver/machine-operations.ts} (81%) create mode 100644 packages/core/src/timing.ts create mode 100644 site/docs/recipes/faster-build.md diff --git a/README.md b/README.md index 963e7908..d55327d6 100644 --- a/README.md +++ b/README.md @@ -61,6 +61,7 @@ Visit The full documentation here: https://preevy.dev/ - [CLI](#cli) - [Tunnel server](#tunnel-server) - [CI Integration](#ci-integration) + - [Faster builds in CI](#faster-builds-in-ci) - [Security](#security) - [Private environments](#private-environments) - [Notice on preview environments exposure](#notice-on-preview-environments-exposure) @@ -150,7 +151,7 @@ A Docker/OCI image is available on ghcr.io: ghcr.io/livecycle/preevy/tunnel-serv ## CI Integration -Preevy is also designed to work seamlessly with your CI, allowing you to easily import a shared preview profile shared in AWS S3 and Google Cloud Storage (GCS). +Preevy is designed to work seamlessly with your CI, allowing you to easily import a shared preview profile stored in AWS S3 or Google Cloud Storage (GCS). Profiles are created using `preevy init`. Choose an S3/GCS URL for storing the profile - Preevy will create a bucket if one doesn't exist. @@ -162,6 +163,10 @@ Examples: - [Using AWS Lightsail](https://preevy.dev/ci/example-github-actions) - [Using Google Cloud Engine](https://preevy.dev/ci/example-github-actions-gce) +### Faster builds in CI + +Check out our [documentation](https://preevy.dev/recipes/faster-build) to find out how to speed up your builds and reduce the costs of your preview environments by running Preevy with BuildKit builders in CI. + ## Security In case you find a security issue or have something you would like to discuss, refer to our [security policy](https://github.com/livecycle/preevy/blob/main/security.md). @@ -333,6 +338,8 @@ The Preevy CLI collects telemetry data to help us understand product usage and d The data collected is *anonymous* and cannot be used to uniquely identify a user. Access to the data is limited to Livecycle's employees and not shared with 3rd parties. +To see the collected data, set the environment variable `PREEVY_TELEMETRY_FILE` to a filename. + We appreciate the usage data sent to us - it's the most basic and raw type of feedback we get from our users. However, if you are concerned about sending out data, you may choose to disable telemetry. Telemetry collection can be disabled by setting the environment variable `PREEVY_DISABLE_TELEMETRY` to `1` or `true`.
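For illustration only (this block is not part of the patch): the build-related flags this change adds to `preevy up` (defined below in `packages/cli-common/src/lib/common-flags/build-flags.ts`) could be combined in a CI job roughly as follows; the registry address and builder name are placeholders.

```bash
# Build with a named BuildKit builder and push images (and layer cache) to a registry,
# instead of building on the preview machine:
preevy up --registry 123456789.dkr.ecr.us-east-1.amazonaws.com/my-repo --builder my-ci-builder

# Skip the build step entirely, e.g. when images were already built earlier in the pipeline:
preevy up --no-build
```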
diff --git a/package.json b/package.json index 9cf35b3a..c2219427 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "patch-package": "^8.0.0", "postinstall-postinstall": "^2.1.0", "syncpack": "^9.8.4", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "resolutions": { "**/@oclif/core": "livecycle/oclif-core-patched-for-preevy#v3.12.0-preevy-patch-10" diff --git a/packages/cli-common/package.json b/packages/cli-common/package.json index c585b3fc..38a6aefa 100644 --- a/packages/cli-common/package.json +++ b/packages/cli-common/package.json @@ -18,8 +18,8 @@ "@jest/globals": "29.7.0", "@types/lodash": "^4.14.192", "@types/node": "18", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-plugin-import": "^2.27.5", @@ -29,7 +29,7 @@ "jest": "29.7.0", "shx": "^0.3.3", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "lint": "eslint . --ext .ts,.tsx --cache", diff --git a/packages/cli-common/src/commands/base-command.ts b/packages/cli-common/src/commands/base-command.ts index 1e4a3951..038713b4 100644 --- a/packages/cli-common/src/commands/base-command.ts +++ b/packages/cli-common/src/commands/base-command.ts @@ -3,6 +3,7 @@ import { LogLevel, Logger, logLevels, ComposeModel, ProcessError, telemetryEmitter, } from '@preevy/core' import { asyncReduce } from 'iter-tools-es' +import { ParsingToken } from '@oclif/core/lib/interfaces/parser' import { commandLogger } from '../lib/log' import { composeFlags, pluginFlags } from '../lib/common-flags' @@ -10,6 +11,8 @@ import { composeFlags, pluginFlags } from '../lib/common-flags' export type Flags = Interfaces.InferredFlags export type Args = Interfaces.InferredArgs +const argsFromRaw = (raw: ParsingToken[]) => raw.filter(arg => arg.type === 'arg').map(arg => arg.input).filter(Boolean) + abstract class BaseCommand extends Command { static baseFlags = { 'log-level': Flags.custom({ @@ -35,24 +38,29 @@ abstract class BaseCommand extends Comm protected flags!: Flags protected args!: Args + #rawArgs!: ParsingToken[] #userModel?: ComposeModel protected async userModel() { - const { initialUserModel, preevyHooks } = this.config + const { initialUserModel } = this.config if (initialUserModel instanceof Error) { return initialUserModel } if (!this.#userModel) { - this.#userModel = await asyncReduce( - initialUserModel, - (userModel, hook) => hook({ log: this.logger, userModel }, undefined), - preevyHooks?.userModelFilter || [], - ) + this.#userModel = await this.modelFilter(initialUserModel) } return this.#userModel } + protected get modelFilter() { + return (model: ComposeModel) => asyncReduce( + model, + (filteredModel, hook) => hook({ log: this.logger, userModel: filteredModel }, undefined), + this.config.preevyHooks?.userModelFilter || [], + ) + } + protected get preevyConfig() { return this.config.preevyConfig } @@ -76,17 +84,18 @@ abstract class BaseCommand extends Comm public async init(): Promise { await super.init() - const { args, flags } = await this.parse({ + const { args, flags, raw } = await this.parse({ flags: this.ctor.flags, baseFlags: super.ctor.baseFlags, args: this.ctor.args, - strict: this.ctor.strict, + strict: false, }) this.args = args as Args this.flags = flags as Flags if (this.flags.debug) { oclifSettings.debug = true } + this.#rawArgs = raw this.logger = commandLogger(this, this.flags.json ? 
'stderr' : 'stdout') this.stdErrLogger = commandLogger(this, 'stderr') } @@ -94,6 +103,8 @@ abstract class BaseCommand extends Comm protected logger!: Logger protected stdErrLogger!: Logger + protected get rawArgs() { return argsFromRaw(this.#rawArgs) } + public get logLevel(): LogLevel { return this.flags['log-level'] ?? this.flags.debug ? 'debug' : 'info' } diff --git a/packages/cli-common/src/hooks/init/load-plugins.ts b/packages/cli-common/src/hooks/init/load-plugins.ts index 7acb96e7..9588793e 100644 --- a/packages/cli-common/src/hooks/init/load-plugins.ts +++ b/packages/cli-common/src/hooks/init/load-plugins.ts @@ -53,6 +53,7 @@ export const initHook: OclifHook<'init'> = async function hook(args) { async () => await localComposeClient({ composeFiles, projectName: flags.project, + projectDirectory: process.cwd(), }).getModelOrError(), { text: `Loading compose file${composeFiles.length > 1 ? 's' : ''}: ${composeFiles.join(', ')}`, diff --git a/packages/cli-common/src/index.ts b/packages/cli-common/src/index.ts index b34a45e4..5eceef1d 100644 --- a/packages/cli-common/src/index.ts +++ b/packages/cli-common/src/index.ts @@ -2,7 +2,9 @@ export * from './lib/plugins/model' export * as text from './lib/text' export { HookName, HookFunc, HooksListeners, Hooks } from './lib/hooks' export { PluginContext, PluginInitContext } from './lib/plugins/context' -export { composeFlags, pluginFlags, envIdFlags, tunnelServerFlags, urlFlags } from './lib/common-flags' -export { formatFlagsToArgs, parseFlags, ParsedFlags, argsFromRaw } from './lib/flags' +export { + composeFlags, pluginFlags, envIdFlags, tunnelServerFlags, urlFlags, buildFlags, tableFlags, parseBuildFlags, +} from './lib/common-flags' +export { formatFlagsToArgs, parseFlags, ParsedFlags } from './lib/flags' export { initHook } from './hooks/init/load-plugins' export { default as BaseCommand } from './commands/base-command' diff --git a/packages/cli-common/src/lib/common-flags/build-flags.ts b/packages/cli-common/src/lib/common-flags/build-flags.ts new file mode 100644 index 00000000..896fc99c --- /dev/null +++ b/packages/cli-common/src/lib/common-flags/build-flags.ts @@ -0,0 +1,69 @@ +import { Flags } from '@oclif/core' +import { InferredFlags } from '@oclif/core/lib/interfaces' +import { BuildSpec, parseRegistry } from '@preevy/core' + +const helpGroup = 'BUILD' + +export const buildFlags = { + 'no-build': Flags.boolean({ + description: 'Do not build images', + helpGroup, + allowNo: false, + default: false, + required: false, + }), + registry: Flags.string({ + description: 'Image registry. If this flag is specified, the "build-context" flag defaults to "*local"', + helpGroup, + required: false, + }), + 'registry-single-name': Flags.string({ + description: 'Use single name for image registry, ECR-style. 
Default: auto-detect from "registry" flag', + helpGroup, + required: false, + dependsOn: ['registry'], + }), + 'no-registry-single-name': Flags.boolean({ + description: 'Disable auto-detection for ECR-style registry single name', + helpGroup, + allowNo: false, + required: false, + exclusive: ['registry-single-name'], + }), + 'no-registry-cache': Flags.boolean({ + description: 'Do not add the registry as a cache source and target', + helpGroup, + allowNo: false, + required: false, + dependsOn: ['registry'], + }), + builder: Flags.string({ + description: 'Builder to use', + helpGroup, + required: false, + }), + 'no-cache': Flags.boolean({ + description: 'Do not use cache when building the images', + helpGroup, + allowNo: false, + required: false, + }), +} as const + +export const parseBuildFlags = (flags: Omit, 'json'>): BuildSpec | undefined => { + if (flags['no-build']) { + return undefined + } + + return { + builder: flags.builder, + noCache: flags['no-cache'], + cacheFromRegistry: !flags['no-registry-cache'], + ...flags.registry && { + registry: parseRegistry({ + registry: flags.registry, + singleName: flags['no-registry-single-name'] ? false : flags['registry-single-name'], + }), + }, + } +} diff --git a/packages/cli-common/src/lib/common-flags.ts b/packages/cli-common/src/lib/common-flags/index.ts similarity index 64% rename from packages/cli-common/src/lib/common-flags.ts rename to packages/cli-common/src/lib/common-flags/index.ts index bf964811..57d1e9f4 100644 --- a/packages/cli-common/src/lib/common-flags.ts +++ b/packages/cli-common/src/lib/common-flags/index.ts @@ -1,17 +1,23 @@ -import { Flags } from '@oclif/core' -import { DEFAULT_PLUGINS } from './plugins/default-plugins' +import { Flags, ux } from '@oclif/core' +import { mapValues } from 'lodash' +import { EOL } from 'os' +import { DEFAULT_PLUGINS } from '../plugins/default-plugins' + +export * from './build-flags' + +export const tableFlags = mapValues(ux.table.flags(), f => ({ ...f, helpGroup: 'OUTPUT' })) as ReturnType const projectFlag = { project: Flags.string({ char: 'p', - description: 'Project name. Defaults to the Compose project name', + summary: 'Project name. Defaults to the Compose project name', required: false, helpGroup: 'GLOBAL', }), } export const composeFlags = { file: Flags.string({ - description: 'Compose configuration file', + summary: 'Compose configuration file', multiple: true, delimiter: ',', singleValue: true, @@ -21,7 +27,7 @@ export const composeFlags = { helpGroup: 'GLOBAL', }), 'system-compose-file': Flags.string({ - description: 'Add extra Compose configuration file without overriding the defaults', + summary: 'Add extra Compose configuration file without overriding the defaults', multiple: true, delimiter: ',', singleValue: true, @@ -52,7 +58,8 @@ export const pluginFlags = { export const envIdFlags = { id: Flags.string({ - description: 'Environment id - affects created URLs. If not specified, will try to detect automatically', + summary: 'Environment id', + description: `Affects created URLs${EOL}If not specified, will detect from the current Git context`, required: false, }), ...projectFlag, @@ -60,30 +67,31 @@ export const envIdFlags = { export const tunnelServerFlags = { 'tunnel-url': Flags.string({ - description: 'Tunnel url, specify ssh://hostname[:port] or ssh+tls://hostname[:port]', + summary: 'Tunnel url, specify ssh://hostname[:port] or ssh+tls://hostname[:port]', char: 't', default: 'ssh+tls://livecycle.run' ?? 
process.env.PREVIEW_TUNNEL_OVERRIDE, }), 'tls-hostname': Flags.string({ - description: 'Override TLS server name when tunneling via HTTPS', + summary: 'Override TLS server name when tunneling via HTTPS', required: false, }), 'insecure-skip-verify': Flags.boolean({ - description: 'Skip TLS or SSH certificate verification', + summary: 'Skip TLS or SSH certificate verification', default: false, }), } as const export const urlFlags = { 'include-access-credentials': Flags.boolean({ - description: 'Include access credentials for basic auth for each service URL', + summary: 'Include access credentials for basic auth for each service URL', default: false, }), 'show-preevy-service-urls': Flags.boolean({ - description: 'Show URLs for internal Preevy services', + summary: 'Show URLs for internal Preevy services', default: false, }), 'access-credentials-type': Flags.custom<'browser' | 'api'>({ + summary: 'Access credentials type', options: ['api', 'browser'], dependsOn: ['include-access-credentials'], default: 'browser', diff --git a/packages/cli-common/src/lib/flags.ts b/packages/cli-common/src/lib/flags.ts index 3e22f43a..10c1f6a9 100644 --- a/packages/cli-common/src/lib/flags.ts +++ b/packages/cli-common/src/lib/flags.ts @@ -1,5 +1,4 @@ import { Flag } from '@oclif/core/lib/interfaces' -import { ParsingToken } from '@oclif/core/lib/interfaces/parser' import { Parser } from '@oclif/core/lib/parser/parse' type FlagSpec =Pick, 'type' | 'default'> @@ -35,5 +34,3 @@ export const parseFlags = async (def: T, argv: string[]) => (await }).parse()).flags export type ParsedFlags = Omit>>, 'json'> - -export const argsFromRaw = (raw: ParsingToken[]) => raw.filter(arg => arg.type === 'arg').map(arg => arg.input).filter(Boolean) diff --git a/packages/cli/bin/dev b/packages/cli/bin/dev index 8d64835a..21908218 100755 --- a/packages/cli/bin/dev +++ b/packages/cli/bin/dev @@ -1,5 +1,6 @@ #!/usr/bin/env node +require('disposablestack/auto') const oclif = require('@oclif/core') const path = require('path') diff --git a/packages/cli/bin/run b/packages/cli/bin/run index c44179b9..44a97010 100755 --- a/packages/cli/bin/run +++ b/packages/cli/bin/run @@ -1,5 +1,6 @@ #!/usr/bin/env node require('source-map-support').install() +require('disposablestack/auto') const oclif = require('@oclif/core') diff --git a/packages/cli/package.json b/packages/cli/package.json index da07b299..d8b5205b 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -30,6 +30,7 @@ "@preevy/driver-kube-pod": "0.0.56", "@preevy/driver-lightsail": "0.0.56", "@preevy/plugin-github": "0.0.56", + "disposablestack": "^1.1.2", "inquirer": "^8.0.0", "inquirer-autocomplete-prompt": "^2.0.0", "iter-tools-es": "^7.5.3", @@ -44,8 +45,8 @@ "@types/lodash": "^4.14.192", "@types/node": "18", "@types/shell-escape": "^0.2.1", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-oclif": "^4", @@ -61,7 +62,7 @@ "ts-node": "^10.9.1", "tsconfig-paths": "^4.2.0", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "oclif": { "bin": "preevy", diff --git a/packages/cli/src/commands/down.ts b/packages/cli/src/commands/down.ts index 81857e77..2555afb3 100644 --- a/packages/cli/src/commands/down.ts +++ b/packages/cli/src/commands/down.ts @@ -26,7 +26,7 @@ export default class Down extends DriverCommand { async run(): Promise { const log = 
this.logger - const { flags } = await this.parse(Down) + const { flags } = this const driver = await this.driver() const envId = await findEnvId({ diff --git a/packages/cli/src/commands/logs.ts b/packages/cli/src/commands/logs.ts index cf815bb6..95f3879b 100644 --- a/packages/cli/src/commands/logs.ts +++ b/packages/cli/src/commands/logs.ts @@ -2,10 +2,9 @@ import yaml from 'yaml' import { Args, Flags, Interfaces } from '@oclif/core' import { addBaseComposeTunnelAgentService, - localComposeClient, wrapWithDockerSocket, findEnvId, MachineConnection, ComposeModel, remoteUserModel, + localComposeClient, findEnvId, MachineConnection, ComposeModel, remoteUserModel, dockerEnvContext, } from '@preevy/core' import { COMPOSE_TUNNEL_AGENT_SERVICE_NAME } from '@preevy/common' -import { argsFromRaw } from '@preevy/cli-common' import DriverCommand from '../driver-command' import { envIdFlags } from '../common-flags' @@ -80,8 +79,7 @@ export default class Logs extends DriverCommand { async run(): Promise { const log = this.logger - const { flags, raw } = await this.parse(Logs) - const restArgs = argsFromRaw(raw) + const { flags, rawArgs: restArgs } = this let connection: MachineConnection let userModel: ComposeModel @@ -99,23 +97,21 @@ export default class Logs extends DriverCommand { connection = await this.connect(envId) } - try { - const compose = localComposeClient({ - composeFiles: Buffer.from(yaml.stringify(addBaseComposeTunnelAgentService(userModel))), - projectName: flags.project, - }) + const compose = localComposeClient({ + composeFiles: Buffer.from(yaml.stringify(addBaseComposeTunnelAgentService(userModel))), + projectName: flags.project, + projectDirectory: process.cwd(), + }) - const withDockerSocket = wrapWithDockerSocket({ connection, log }) - await withDockerSocket(() => compose.spawnPromise( - [ - 'logs', - ...serializeDockerComposeLogsFlags(flags), - ...validateServices(restArgs, userModel), - ], - { stdio: 'inherit' }, - )) - } finally { - await connection.close() - } + await using dockerContext = await dockerEnvContext({ connection, log }) + + await compose.spawnPromise( + [ + 'logs', + ...serializeDockerComposeLogsFlags(flags), + ...validateServices(restArgs, userModel), + ], + { stdio: 'inherit', env: dockerContext.env }, + ) } } diff --git a/packages/cli/src/commands/ls.ts b/packages/cli/src/commands/ls.ts index e80229c8..d443eabd 100644 --- a/packages/cli/src/commands/ls.ts +++ b/packages/cli/src/commands/ls.ts @@ -1,6 +1,7 @@ import { ux } from '@oclif/core' import { asyncMap, asyncToArray } from 'iter-tools-es' import { commands } from '@preevy/core' +import { tableFlags } from '@preevy/cli-common' import DriverCommand from '../driver-command' // eslint-disable-next-line no-use-before-define @@ -8,7 +9,7 @@ export default class Ls extends DriverCommand { static description = 'List preview environments' static flags = { - ...ux.table.flags(), + ...tableFlags, } static args = { @@ -17,7 +18,7 @@ export default class Ls extends DriverCommand { static enableJsonFlag = true async run(): Promise { - const { flags } = await this.parse(Ls) + const { flags } = this const driver = await this.driver() const machines = await asyncToArray( asyncMap( diff --git a/packages/cli/src/commands/proxy/connect.ts b/packages/cli/src/commands/proxy/connect.ts index e427abde..84a70084 100644 --- a/packages/cli/src/commands/proxy/connect.ts +++ b/packages/cli/src/commands/proxy/connect.ts @@ -1,6 +1,6 @@ -import { ux, Args, Flags } from '@oclif/core' +import { Args, Flags } from '@oclif/core' import { 
jwkThumbprint, commands, profileStore, withSpinner, SshConnection, machineId, validateEnvId, normalizeEnvId, EnvId } from '@preevy/core' -import { tunnelServerFlags, urlFlags } from '@preevy/cli-common' +import { tableFlags, text, tunnelServerFlags, urlFlags } from '@preevy/cli-common' import { inspect } from 'util' import { formatPublicKey } from '@preevy/common' import { spawn } from 'child_process' @@ -15,7 +15,7 @@ export default class Connect extends ProfileCommand { static flags = { ...tunnelServerFlags, ...urlFlags, - ...ux.table.flags(), + ...tableFlags, id: Flags.string({ aliases: ['env-id'], description: 'specify the environment ID for this app', @@ -69,7 +69,7 @@ export default class Connect extends ProfileCommand { } else { const deviceId = (await machineId(this.config.dataDir)).substring(0, 2) envId = normalizeEnvId(`${composeProject}-dev-${deviceId}`) - this.logger.info(`Using environment ID ${envId}, based on Docker Compose and local device`) + this.logger.info(`Using environment ID ${text.code(envId)}, based on Docker Compose and local device`) } let client: SshConnection['client'] | undefined let hostKey: Buffer diff --git a/packages/cli/src/commands/proxy/disconnect.ts b/packages/cli/src/commands/proxy/disconnect.ts index 27f36c3e..8b4bda1c 100644 --- a/packages/cli/src/commands/proxy/disconnect.ts +++ b/packages/cli/src/commands/proxy/disconnect.ts @@ -1,5 +1,6 @@ -import { ux, Args } from '@oclif/core' +import { Args } from '@oclif/core' import { commands, execPromiseStdout } from '@preevy/core' +import { tableFlags } from '@preevy/cli-common' import ProfileCommand from '../../profile-command' // eslint-disable-next-line no-use-before-define @@ -7,7 +8,7 @@ export default class Disconnect extends ProfileCommand { static description = 'Disconnect tunneled local compose application' static flags = { - ...ux.table.flags(), + ...tableFlags, } static strict = false @@ -22,7 +23,7 @@ export default class Disconnect extends ProfileCommand { // eslint-disable-next-line class-methods-use-this async run(): Promise { - const { args } = await this.parse(Disconnect) + const { args } = this const inspector = commands.proxy.inspectRunningComposeApp(args['compose-project']) const agentContainer = await inspector.getPreevyAgentContainer() if (agentContainer) { diff --git a/packages/cli/src/commands/proxy/urls.ts b/packages/cli/src/commands/proxy/urls.ts index 8efc6bea..b3f97c69 100644 --- a/packages/cli/src/commands/proxy/urls.ts +++ b/packages/cli/src/commands/proxy/urls.ts @@ -1,5 +1,5 @@ -import { ux, Args } from '@oclif/core' -import { tunnelServerFlags, urlFlags, formatFlagsToArgs } from '@preevy/cli-common' +import { Args } from '@oclif/core' +import { tunnelServerFlags, urlFlags, formatFlagsToArgs, tableFlags } from '@preevy/cli-common' import { commands } from '@preevy/core' import { pick } from 'lodash' import PreevyUrlsCmd from '../urls' @@ -12,7 +12,7 @@ export default class Urls extends ProfileCommand { static flags = { ...tunnelServerFlags, ...urlFlags, - ...ux.table.flags(), + ...tableFlags, } static strict = false diff --git a/packages/cli/src/commands/purge.ts b/packages/cli/src/commands/purge.ts index 45fb128c..5fb6f084 100644 --- a/packages/cli/src/commands/purge.ts +++ b/packages/cli/src/commands/purge.ts @@ -65,7 +65,7 @@ export default class Purge extends DriverCommand { static strict = false async run(): Promise { - const { flags } = await this.parse(Purge) + const { flags } = this const driver = await this.driver() const resourcePlurals: Record = { 
[machineResourceType]: 'machines', ...driver.resourcePlurals } diff --git a/packages/cli/src/commands/shell.ts b/packages/cli/src/commands/shell.ts index a22ec0d2..12729f0e 100644 --- a/packages/cli/src/commands/shell.ts +++ b/packages/cli/src/commands/shell.ts @@ -1,6 +1,5 @@ import { Args } from '@oclif/core' import { commands } from '@preevy/core' -import { argsFromRaw } from '@preevy/cli-common' import DriverCommand from '../driver-command' // eslint-disable-next-line no-use-before-define @@ -21,11 +20,9 @@ export default class Shell extends DriverCommand { static enableJsonFlag = false async run(): Promise { - const { args, raw } = await this.parse(Shell) + const { args, rawArgs: restArgs } = this const driver = await this.driver() - const restArgs = argsFromRaw(raw).slice(1) - const result = await commands.shell({ envId: args.envId, args: restArgs, diff --git a/packages/cli/src/commands/up.ts b/packages/cli/src/commands/up.ts index 7ae3df53..b94a97a8 100644 --- a/packages/cli/src/commands/up.ts +++ b/packages/cli/src/commands/up.ts @@ -1,13 +1,17 @@ -import { Args, Flags, ux } from '@oclif/core' +import { Args, Flags } from '@oclif/core' import { + ComposeModel, + Logger, + ProfileStore, + TunnelOpts, addBaseComposeTunnelAgentService, - commands, findComposeTunnelAgentUrl, + commands, ensureMachine, findComposeTunnelAgentUrl, findEnvId, findProjectName, getTunnelNamesToServicePorts, jwkThumbprint, profileStore, telemetryEmitter, withSpinner, } from '@preevy/core' -import { argsFromRaw, tunnelServerFlags } from '@preevy/cli-common' +import { buildFlags, parseBuildFlags, tableFlags, text, tunnelServerFlags } from '@preevy/cli-common' import { inspect } from 'util' import { editUrl, tunnelNameResolver } from '@preevy/common' import MachineCreationDriverCommand from '../machine-creation-driver-command' @@ -15,6 +19,61 @@ import { envIdFlags, urlFlags } from '../common-flags' import { filterUrls, printUrls, writeUrlsToFile } from './urls' import { connectToTunnelServerSsh } from '../tunnel-server-client' +const fetchTunnelServerDetails = async ({ + log, + tunnelingKey, + envId, + userModel, + pStore, + tunnelOpts, +}: { + log: Logger + tunnelingKey: string | Buffer + envId: string + userModel: ComposeModel + pStore: ProfileStore + tunnelOpts: TunnelOpts +}) => { + const expectedTunnels = getTunnelNamesToServicePorts( + addBaseComposeTunnelAgentService(userModel), + tunnelNameResolver({ envId }), + ) + + const { hostKey, expectedServiceUrls } = await withSpinner(async spinner => { + spinner.text = 'Connecting...' + + const { hostKey: hk, client: tunnelServerSshClient } = await connectToTunnelServerSsh({ + tunnelingKey, + knownServerPublicKeys: pStore.knownServerPublicKeys, + tunnelOpts, + log, + spinner, + }) + + spinner.text = 'Getting server details...' 
+ + const [{ clientId }, expectedTunnelUrls] = await Promise.all([ + tunnelServerSshClient.execHello(), + tunnelServerSshClient.execTunnelUrl(Object.keys(expectedTunnels)), + ]) + + log.debug('Tunnel server details: %j', { clientId, expectedTunnelUrls }) + + void tunnelServerSshClient.end() + + telemetryEmitter().group({ type: 'profile' }, { proxy_client_id: clientId }) + + const esu = Object.entries(expectedTunnels) + .map(([tunnel, { name, port }]) => ({ name, port, url: expectedTunnelUrls[tunnel] })) + + return { hostKey: hk, expectedServiceUrls: esu } + }, { opPrefix: 'Tunnel server', successText: 'Got tunnel server details' }) + + log.debug('expectedServiceUrls: %j', expectedServiceUrls) + + return { expectedServiceUrls, hostKey } +} + // eslint-disable-next-line no-use-before-define export default class Up extends MachineCreationDriverCommand { static description = 'Bring up a preview environment' @@ -22,6 +81,7 @@ export default class Up extends MachineCreationDriverCommand { static flags = { ...envIdFlags, ...tunnelServerFlags, + ...buildFlags, 'skip-unchanged-files': Flags.boolean({ description: 'Detect and skip unchanged files when copying (default: true)', default: true, @@ -38,7 +98,7 @@ export default class Up extends MachineCreationDriverCommand { default: 'https://app.livecycle.run/widget/widget-bootstrap.js', }), ...urlFlags, - ...ux.table.flags(), + ...tableFlags, } static strict = false @@ -51,8 +111,7 @@ export default class Up extends MachineCreationDriverCommand { } async run(): Promise { - const { flags, raw } = await this.parse(Up) - const restArgs = argsFromRaw(raw) + const { flags, rawArgs: restArgs } = this const driver = await this.driver() const machineCreationDriver = await this.machineCreationDriver() @@ -83,57 +142,48 @@ export default class Up extends MachineCreationDriverCommand { insecureSkipVerify: flags['insecure-skip-verify'], } - const expectedTunnels = getTunnelNamesToServicePorts( - addBaseComposeTunnelAgentService(userModel), - tunnelNameResolver({ envId }), - ) - - const { hostKey, expectedServiceUrls } = await withSpinner(async spinner => { - spinner.text = 'Connecting...' - - const { hostKey: hk, client: tunnelServerSshClient } = await connectToTunnelServerSsh({ - tunnelingKey, - knownServerPublicKeys: pStore.knownServerPublicKeys, - tunnelOpts, - log: this.logger, - spinner, - }) - - spinner.text = 'Getting server details...' - - const [{ clientId }, expectedTunnelUrls] = await Promise.all([ - tunnelServerSshClient.execHello(), - tunnelServerSshClient.execTunnelUrl(Object.keys(expectedTunnels)), - ]) - - this.logger.debug('Tunnel server details: %j', { clientId, expectedTunnelUrls }) + const { expectedServiceUrls, hostKey } = await fetchTunnelServerDetails({ + log: this.logger, + tunnelingKey, + envId, + userModel, + pStore, + tunnelOpts, + }) - void tunnelServerSshClient.end() + const injectWidgetScript = flags['enable-widget'] + ? 
editUrl(flags['livecycle-widget-url'], { queryParams: { profile: thumbprint, env: envId } }).toString() + : undefined - telemetryEmitter().group({ type: 'profile' }, { proxy_client_id: clientId }) + await using cleanup = new AsyncDisposableStack() - const esu = Object.entries(expectedTunnels) - .map(([tunnel, { name, port }]) => ({ name, port, url: expectedTunnelUrls[tunnel] })) + const { machine, connection, userAndGroup, dockerPlatform } = await ensureMachine({ + log: this.logger, + debug: this.flags.debug, + machineDriver: driver, + machineCreationDriver, + machineDriverName: this.driverName, + envId, + }) - return { hostKey: hk, expectedServiceUrls: esu } - }, { opPrefix: 'Tunnel server', successText: 'Got tunnel server details' }) + const machineStatusCommand = await driver.machineStatusCommand(machine) - this.logger.debug('expectedServiceUrls: %j', expectedServiceUrls) + cleanup.use(connection) - const injectWidgetScript = flags['enable-widget'] - ? editUrl(flags['livecycle-widget-url'], { queryParams: { profile: thumbprint, env: envId } }).toString() - : undefined + const buildSpec = parseBuildFlags(flags) - const { machine } = await commands.up({ + await commands.up({ + connection, + machineStatusCommand, + userAndGroup, + dockerPlatform, projectName, expectedServiceUrls, userSpecifiedServices: restArgs, debug: flags.debug, - machineDriver: driver, - machineDriverName: this.driverName, - machineCreationDriver, userSpecifiedProjectName: flags.project, composeFiles: this.config.composeFiles, + modelFilter: this.modelFilter, envId, scriptInjections: injectWidgetScript ? { 'livecycle-widget': { src: injectWidgetScript } } : undefined, tunnelOpts, @@ -144,9 +194,10 @@ export default class Up extends MachineCreationDriverCommand { cwd: process.cwd(), skipUnchangedFiles: flags['skip-unchanged-files'], version: this.config.version, + buildSpec, }) - this.log(`Preview environment ${envId} provisioned at: ${machine.locationDescription}`) + this.log(`Preview environment ${text.code(envId)} provisioned at: ${text.code(machine.locationDescription)}`) const composeTunnelServiceUrl = findComposeTunnelAgentUrl(expectedServiceUrls) const flatTunnels = await withSpinner(() => commands.urls({ diff --git a/packages/cli/src/commands/urls.ts b/packages/cli/src/commands/urls.ts index c37bcabf..61e67ef3 100644 --- a/packages/cli/src/commands/urls.ts +++ b/packages/cli/src/commands/urls.ts @@ -2,7 +2,7 @@ import fs from 'fs' import yaml from 'yaml' import { Args, ux, Interfaces } from '@oclif/core' import { FlatTunnel, Logger, ProfileStore, TunnelOpts, addBaseComposeTunnelAgentService, commands, findComposeTunnelAgentUrl, findEnvId, getTunnelNamesToServicePorts, profileStore } from '@preevy/core' -import { HooksListeners, PluginContext, text, tunnelServerFlags } from '@preevy/cli-common' +import { HooksListeners, PluginContext, tableFlags, text, tunnelServerFlags } from '@preevy/cli-common' import { asyncReduce } from 'iter-tools-es' import { tunnelNameResolver } from '@preevy/common' import { connectToTunnelServerSsh } from '../tunnel-server-client' @@ -56,7 +56,7 @@ export default class Urls extends ProfileCommand { static flags = { ...envIdFlags, ...tunnelServerFlags, - ...ux.table.flags(), + ...tableFlags, ...urlFlags, } @@ -102,7 +102,7 @@ export default class Urls extends ProfileCommand { async run(): Promise { const log = this.logger - const { flags, args } = await this.parse(Urls) + const { flags, args } = this const envId = await findEnvId({ userSpecifiedEnvId: flags.id, diff --git 
a/packages/cli/src/commands/version.ts b/packages/cli/src/commands/version.ts index 0137859c..5ae78995 100644 --- a/packages/cli/src/commands/version.ts +++ b/packages/cli/src/commands/version.ts @@ -7,7 +7,7 @@ export default class Version extends BaseCommand { static enableJsonFlag = true async run(): Promise { - const { flags } = await this.parse(Version) + const { flags } = this const log = this.logger if (flags.json) { diff --git a/packages/cli/src/driver-command.ts b/packages/cli/src/driver-command.ts index 11738789..a96301d9 100644 --- a/packages/cli/src/driver-command.ts +++ b/packages/cli/src/driver-command.ts @@ -78,7 +78,7 @@ abstract class DriverCommand extends ProfileCommand try { return await f(connection) } finally { - await connection.close() + connection[Symbol.dispose]() } } diff --git a/packages/cli/src/hooks/init/telemetry.ts b/packages/cli/src/hooks/init/telemetry.ts index f36aab33..f5fc8dbe 100644 --- a/packages/cli/src/hooks/init/telemetry.ts +++ b/packages/cli/src/hooks/init/telemetry.ts @@ -8,7 +8,7 @@ const hook: Hook.Init = async ({ config }) => { return } - const emitter = await createTelemetryEmitter(config) + const emitter = await createTelemetryEmitter({ ...config, filename: config.scopedEnvVar('TELEMETRY_FILE') }) registerEmitter(emitter) wireProcessExit(process, emitter) } diff --git a/packages/cli/tsconfig.json b/packages/cli/tsconfig.json index 9712537f..45fc8a1e 100644 --- a/packages/cli/tsconfig.json +++ b/packages/cli/tsconfig.json @@ -7,7 +7,7 @@ "rootDir": "src", "strict": true, "target": "es2019", - "lib": ["es2022"], + "lib": ["es2022", "ESNext.Disposable"], "esModuleInterop": true, "sourceMap": true, "baseUrl": "." diff --git a/packages/common/package.json b/packages/common/package.json index 2f3f4a77..00e5d29e 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -24,8 +24,8 @@ "@types/node": "18", "@types/shell-escape": "^0.2.1", "@types/ssh2": "^1.11.8", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "esbuild": "^0.17.14", "eslint": "^8.36.0", "husky": "^8.0.0", @@ -33,7 +33,7 @@ "lint-staged": "^14.0.1", "ts-jest": "29.1.1", "tsx": "^3.12.3", - "typescript": "^5.0.4", + "typescript": "^5.2.2", "yaml": "^2.3.2" }, "scripts": { diff --git a/packages/compose-tunnel-agent/package.json b/packages/compose-tunnel-agent/package.json index 69e0ae2c..3d367174 100644 --- a/packages/compose-tunnel-agent/package.json +++ b/packages/compose-tunnel-agent/package.json @@ -38,8 +38,8 @@ "@types/node-fetch": "^2.6.3", "@types/shell-escape": "^0.2.1", "@types/ssh2": "^1.11.8", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "esbuild": "^0.17.14", "eslint": "^8.36.0", "husky": "^8.0.0", @@ -49,7 +49,7 @@ "shx": "^0.3.3", "strip-ansi": "6.0.0", "tsx": "^3.12.3", - "typescript": "^5.0.4", + "typescript": "^5.2.2", "wait-for-expect": "^3.0.2" }, "scripts": { diff --git a/packages/core/package.json b/packages/core/package.json index 14ccd03f..9b590de4 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -53,8 +53,8 @@ "@types/sshpk": "^1.17.1", "@types/tar": "^6.1.4", "@types/tar-stream": "^2.2.2", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": 
"6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-plugin-import": "^2.27.5", @@ -66,7 +66,7 @@ "ts-jest": "29.1.1", "ts-node": "^10.9.1", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "test": "yarn jest", diff --git a/packages/core/src/build.test.ts b/packages/core/src/build.test.ts new file mode 100644 index 00000000..4c887016 --- /dev/null +++ b/packages/core/src/build.test.ts @@ -0,0 +1,413 @@ +import { describe, it, expect, beforeEach } from '@jest/globals' +import { ImageRegistry, generateBuild, parseRegistry } from './build' +import { ComposeModel, ComposeService } from './compose' + +describe('build', () => { + const ECR_BASE_REGISTRY = '123456789.dkr.ecr.us-east-1.amazonaws.com' + const ECR_REPO = 'my-repo' + const ECR_REPO2 = 'my-repo2' + + describe('parseRegistry', () => { + describe('when given an ECR-style registry', () => { + const REGISTRY = `${ECR_BASE_REGISTRY}/${ECR_REPO}` + + describe('when given singleName=false', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: false }) + }) + it('should not return a singleName', () => { + expect(result).toEqual({ registry: REGISTRY }) + }) + }) + + describe('when given singleName=string', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: ECR_REPO2 }) + }) + it('should return the given singleName', () => { + expect(result).toEqual({ registry: REGISTRY, singleName: ECR_REPO2 }) + }) + }) + + describe('when given singleName=undefined', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: undefined }) + }) + it('should auto-detect the singleName', () => { + expect(result).toEqual({ registry: ECR_BASE_REGISTRY, singleName: ECR_REPO }) + }) + }) + }) + + describe('when given an non-ECR-style registry', () => { + const REGISTRY = 'my-registry' + + describe('when given singleName=false', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: false }) + }) + it('should not return a singleName', () => { + expect(result).toEqual({ registry: REGISTRY }) + }) + }) + + describe('when given singleName=string', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: ECR_REPO2 }) + }) + it('should return the given singleName', () => { + expect(result).toEqual({ registry: REGISTRY, singleName: ECR_REPO2 }) + }) + }) + + describe('when given singleName=undefined', () => { + let result: ImageRegistry + + beforeEach(() => { + result = parseRegistry({ registry: REGISTRY, singleName: undefined }) + }) + it('should not return a singleName', () => { + expect(result).toEqual({ registry: REGISTRY }) + }) + }) + }) + }) + + describe('generateBuild', () => { + let result: ReturnType + let bakeArgs: string[] + + describe('sanity', () => { + beforeEach(() => { + result = generateBuild({ + composeModel: { + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + }, + environment: { + FOO: 'bar', + }, + }, + db: { + image: 'mydb', + }, + }, + }, + buildSpec: { + builder: 'my-builder', + cacheFromRegistry: true, + noCache: false, + registry: { registry: 'my-registry' }, + }, + machineDockerPlatform: 'linux/amd64', + gitHash: 'abcdef', + }) + + bakeArgs = result.createBakeArgs('my-file.yaml') + }) + + it('should return a correct build model', () => { 
+ expect(result.buildModel).toEqual({ + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + cache_from: [ + 'my-registry/preevy-my-project-frontend:latest', + 'my-registry/preevy-my-project-frontend:abcdef', + ], + cache_to: [ + 'type=registry,ref=my-registry/preevy-my-project-frontend:latest,mode=max,oci-mediatypes=true,image-manifest=true', + ], + tags: [ + 'my-registry/preevy-my-project-frontend:latest', + 'my-registry/preevy-my-project-frontend:abcdef', + ], + }, + image: 'my-registry/preevy-my-project-frontend:abcdef', + }, + }, + } as ComposeModel) + }) + + it('should return the correct bake args', () => { + expect(bakeArgs).toEqual([ + '-f', 'my-file.yaml', + '--push', + '--builder=my-builder', + '--set=*.platform=linux/amd64', + ]) + }) + + it('should transform the deploy model correctly', () => { + expect(result.deployModel).toEqual({ + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + }, + image: 'my-registry/preevy-my-project-frontend:abcdef', + environment: { + FOO: 'bar', + }, + }, + db: { + image: 'mydb', + }, + }, + } as ComposeModel) + }) + }) + + describe('ECR-style registry', () => { + beforeEach(() => { + result = generateBuild({ + composeModel: { + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + }, + environment: { + FOO: 'bar', + }, + }, + db: { + image: 'mydb', + }, + }, + }, + buildSpec: { + builder: 'my-builder', + cacheFromRegistry: true, + noCache: false, + registry: { registry: 'my-registry', singleName: 'my-repo' }, + }, + machineDockerPlatform: 'linux/amd64', + gitHash: 'abcdef', + }) + }) + + it('should return a correct build model', () => { + expect(result.buildModel.services?.frontend).toMatchObject({ + build: { + cache_from: [ + 'my-registry/my-repo:preevy-my-project-frontend-latest', + 'my-registry/my-repo:preevy-my-project-frontend-abcdef', + ], + cache_to: [ + 'type=registry,ref=my-registry/my-repo:preevy-my-project-frontend-latest,mode=max,oci-mediatypes=true,image-manifest=true', + ], + tags: [ + 'my-registry/my-repo:preevy-my-project-frontend-latest', + 'my-registry/my-repo:preevy-my-project-frontend-abcdef', + ], + }, + image: 'my-registry/my-repo:preevy-my-project-frontend-abcdef', + } as ComposeService) + }) + + it('should transform the deploy model correctly', () => { + expect(result.deployModel.services?.frontend).toMatchObject({ + image: 'my-registry/my-repo:preevy-my-project-frontend-abcdef', + }) + }) + }) + + describe('when no registry is given', () => { + beforeEach(() => { + result = generateBuild({ + composeModel: { + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + }, + environment: { + FOO: 'bar', + }, + }, + db: { + image: 'mydb', + }, + }, + }, + buildSpec: { + builder: 'my-builder', + cacheFromRegistry: true, + noCache: false, + registry: undefined, + }, + machineDockerPlatform: 'linux/amd64', + gitHash: 'abcdef', + }) + + bakeArgs = result.createBakeArgs('my-file.yaml') + }) + + it('should return a correct build model', () => { + expect(result.buildModel.services?.frontend).toMatchObject({ + build: { + tags: [ + 'preevy-my-project-frontend:latest', + 'preevy-my-project-frontend:abcdef', + ], + }, + image: 'preevy-my-project-frontend:abcdef', + } as ComposeService) + }) + + it('should transform the deploy model correctly', () => { + expect(result.deployModel.services?.frontend).toMatchObject({ + image: 'preevy-my-project-frontend:abcdef', + }) + }) + + 
it('should return the correct bake args', () => { + expect(bakeArgs).toContain('--load') + expect(bakeArgs).not.toContain('--push') + }) + }) + + describe('when no git hash is given', () => { + beforeEach(() => { + result = generateBuild({ + composeModel: { + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + }, + environment: { + FOO: 'bar', + }, + }, + backend: { + build: { context: '.' }, + }, + db: { + image: 'mydb', + }, + }, + }, + buildSpec: { + builder: 'my-builder', + cacheFromRegistry: true, + noCache: false, + registry: { registry: 'my-registry' }, + }, + machineDockerPlatform: 'linux/amd64', + gitHash: undefined, + }) + + bakeArgs = result.createBakeArgs('my-file.yaml') + }) + + describe('build model', () => { + it('should contain a random tag', () => { + expect(result.buildModel.services?.frontend).toMatchObject({ + build: { + tags: [ + 'my-registry/preevy-my-project-frontend:latest', + expect.stringMatching(/^my-registry\/preevy-my-project-frontend:[a-z0-9]{8}$/), + ], + }, + image: expect.stringMatching(/^my-registry\/preevy-my-project-frontend:[a-z0-9]{8}$/), + }) + }) + + it('should match the image', () => { + expect(result.buildModel.services?.frontend?.build?.tags).toContain( + result.buildModel.services?.frontend?.image, + ) + expect(result.deployModel.services?.frontend?.image).toEqual( + result.buildModel.services?.frontend?.image, + ) + }) + + it('should match the random tag of the other service', () => { + const backendRandomTag = result.buildModel.services?.backend?.image?.split?.(':')?.[1] as string + expect(backendRandomTag).toMatch(/^[a-z0-9]{8}$/) + expect(backendRandomTag).toEqual(result.buildModel.services?.frontend?.image?.split?.(':')?.[1]) + }) + }) + }) + + describe('when buildSpec.cacheFromRegistry=false and an image is given', () => { + beforeEach(() => { + result = generateBuild({ + composeModel: { + name: 'my-project', + services: { + frontend: { + build: { + context: '.', + target: 'dev', + cache_from: ['cf1', 'cf2'], + cache_to: ['ct1'], + }, + environment: { + FOO: 'bar', + }, + image: 'my-frontend', + }, + db: { + image: 'mydb', + }, + }, + }, + buildSpec: { + builder: 'my-builder', + cacheFromRegistry: false, + noCache: false, + registry: { registry: 'my-registry' }, + }, + machineDockerPlatform: 'linux/amd64', + gitHash: 'abcdef', + }) + + bakeArgs = result.createBakeArgs('my-file.yaml') + }) + + it('should return a correct build model', () => { + expect(result.buildModel.services?.frontend).toMatchObject({ + build: { + context: '.', + target: 'dev', + tags: [ + 'my-registry/preevy-my-project-frontend:latest', + 'my-registry/preevy-my-project-frontend:abcdef', + ], + cache_from: ['cf1', 'cf2'], + cache_to: ['ct1'], + }, + image: 'my-frontend', + }) + }) + }) + }) +}) diff --git a/packages/core/src/build.ts b/packages/core/src/build.ts new file mode 100644 index 00000000..c20d962f --- /dev/null +++ b/packages/core/src/build.ts @@ -0,0 +1,113 @@ +import { mapValues, pickBy } from 'lodash' +import { ComposeModel } from './compose' +import { randomString } from './strings' +import { hasProp } from './nulls' + +export type ImageRegistry = { registry: string; singleName?: string } + +export type BuildSpec = { + registry?: ImageRegistry + cacheFromRegistry?: boolean + noCache?: boolean + builder?: string +} + +const ecrRegex = /^(?<registry>[0-9]+\.dkr\.ecr\.[^.]+\.*\.amazonaws\.com)\/(?<singleName>.+)/ + +export const parseRegistry = ( + { registry, singleName }: { registry: string; singleName: undefined | string | false }, +):
ImageRegistry => { + if (singleName === undefined) { + const match = ecrRegex.exec(registry) + if (match) { + return match.groups as { registry: string; singleName: string } + } + } + return { registry, singleName: typeof singleName === 'string' ? singleName : undefined } +} + +type ImageRefFactory = ({ image, tag }: { image: string; tag: string }) => string + +const plainImageRefFactory: ImageRefFactory = ({ image, tag }) => `${image}:${tag}` + +const registryImageRefFactory = ({ registry, singleName }: ImageRegistry): ImageRefFactory => ( + singleName + ? ({ image, tag }) => `${registry}/${singleName}:${image}-${tag}` + : ({ image, tag }) => `${registry}/${image}:${tag}` +) + +export const generateBuild = ({ + composeModel, + buildSpec, + machineDockerPlatform, + gitHash, +}: { + composeModel: ComposeModel + buildSpec: BuildSpec + machineDockerPlatform: string + gitHash: string | undefined +}) => { + const tagSuffix = gitHash ?? randomString.lowercaseNumeric(8) + + const imageRef = buildSpec.registry + ? registryImageRefFactory(buildSpec.registry) + : plainImageRefFactory + + const imageRefForService = (service: string, tag: string) => imageRef({ + image: `preevy-${composeModel.name}-${service}`, + tag, + }) + + const services = mapValues( + pickBy(composeModel.services ?? {}, hasProp('build')), + ({ build, image }, serviceName) => { + const latestImage = imageRefForService(serviceName, 'latest') + const thisImage = imageRefForService(serviceName, tagSuffix) + + const cacheFrom = build.cache_from ?? [] + const cacheTo = build.cache_to ?? [] + const tags = build.tags ?? [] + + if (buildSpec.registry && buildSpec.cacheFromRegistry) { + cacheTo.push(`type=registry,ref=${latestImage},mode=max,oci-mediatypes=true,image-manifest=true`) + cacheFrom.push(latestImage) + cacheFrom.push(thisImage) + } + + tags.push(latestImage) + tags.push(thisImage) + + return { + image: image ?? thisImage, + build: { + ...build, + tags, + cache_from: cacheFrom, + cache_to: cacheTo, + }, + } + }, + ) + + const buildModel: ComposeModel = { name: composeModel.name, services } + + const createBakeArgs = (modelFilename: string) => [ + '-f', modelFilename, + ...buildSpec.registry ? ['--push'] : ['--load'], + ...buildSpec.builder ? [`--builder=${buildSpec.builder}`] : [], + ...buildSpec.noCache ? ['--no-cache'] : [], + `--set=*.platform=${machineDockerPlatform}`, + ] + + const deployModel: ComposeModel = { + ...composeModel, + services: { + ...mapValues(composeModel.services, (service, serviceName) => ({ + ...service, + image: buildModel.services?.[serviceName]?.image ?? service.image, + })), + }, + } + + return { buildModel, createBakeArgs, deployModel } +} diff --git a/packages/core/src/closable.ts b/packages/core/src/closable.ts deleted file mode 100644 index 90c6f5c1..00000000 --- a/packages/core/src/closable.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { isPromise } from 'util/types' - -export type Closable = { close: () => void | Promise } -export const withClosable = async ( - f: (closable: C, ...args: Args) => Return, - closable: C, - ...args: Args -) => { - try { - const result = f(closable, ...args) - return isPromise(result) - ? 
await result - : result - } finally { - await closable.close() - } -} diff --git a/packages/core/src/commands/build.ts b/packages/core/src/commands/build.ts new file mode 100644 index 00000000..cf829633 --- /dev/null +++ b/packages/core/src/commands/build.ts @@ -0,0 +1,58 @@ +import fs from 'fs' +import path from 'path' +import yaml from 'yaml' +import { spawn } from 'child_process' +import { ComposeModel } from '../compose' +import { Logger } from '../log' +import { BuildSpec, generateBuild } from '../build' +import { gitContext } from '../git' +import { childProcessPromise } from '../child-process' +import { telemetryEmitter } from '../telemetry' +import { measureTime } from '../timing' + +const buildCommand = async ({ + log, + composeModel, + projectLocalDataDir, + cwd, + buildSpec, + machineDockerPlatform, + env, +}: { + log: Logger + composeModel: ComposeModel + projectLocalDataDir: string + cwd: string + buildSpec: BuildSpec + machineDockerPlatform: string + env?: Record +}) => { + const { buildModel, createBakeArgs, deployModel } = generateBuild({ + composeModel, + buildSpec, + gitHash: await gitContext(cwd)?.commit({ short: true }), + machineDockerPlatform, + }) + + const modelStr = yaml.stringify(buildModel) + log.debug('build model', modelStr) + const modelFilename = path.join(projectLocalDataDir, 'docker-compose.build.yaml') + await fs.promises.writeFile(modelFilename, modelStr, 'utf-8') + + const dockerArgs = [ + ...['buildx', 'bake'], + ...createBakeArgs(modelFilename), + ] + + log.info(`Running: docker ${dockerArgs.join(' ')}`) + const { elapsedTimeSec } = await measureTime(() => childProcessPromise(spawn('docker', dockerArgs, { stdio: 'inherit', cwd, env }))) + telemetryEmitter().capture('build success', { + elapsed_sec: elapsedTimeSec, + has_registry: Boolean(buildSpec.registry), + }) + log.info(`Build step done in ${elapsedTimeSec.toLocaleString(undefined, { maximumFractionDigits: 2 })}s`) + + return { buildModel, deployModel } +} + +export default buildCommand diff --git a/packages/core/src/commands/index.ts b/packages/core/src/commands/index.ts index baeae5f8..59b95d9c 100644 --- a/packages/core/src/commands/index.ts +++ b/packages/core/src/commands/index.ts @@ -1,7 +1,6 @@ -import up from './up' -import ls from './ls' -import shell from './shell' +export { default as up } from './up' +export { default as ls } from './ls' +export { default as shell } from './shell' -export { up, ls, shell } export { urls } from './urls' export * as proxy from './proxy' diff --git a/packages/core/src/commands/model.ts b/packages/core/src/commands/model.ts new file mode 100644 index 00000000..16a6eed0 --- /dev/null +++ b/packages/core/src/commands/model.ts @@ -0,0 +1,80 @@ +import { MachineStatusCommand, ScriptInjection } from '@preevy/common' +import path from 'path' +import { rimraf } from 'rimraf' +import { TunnelOpts } from '../ssh' +import { ComposeModel, remoteComposeModel } from '../compose' +import { createCopiedFileInDataDir } from '../remote-files' +import { Logger } from '../log' +import { EnvId } from '../env-id' + +const composeModel = async ({ + debug, + machineStatusCommand, + userAndGroup, + tunnelOpts, + userSpecifiedProjectName, + userSpecifiedServices, + scriptInjections, + composeFiles, + log, + dataDir, + allowedSshHostKeys: hostKey, + sshTunnelPrivateKey, + cwd, + version, + envId, + expectedServiceUrls, + projectName, + modelFilter, +}: { + debug: boolean + machineStatusCommand?: MachineStatusCommand + userAndGroup: [string, string] + tunnelOpts: TunnelOpts + 
userSpecifiedProjectName: string | undefined + userSpecifiedServices: string[] + composeFiles: string[] + log: Logger + dataDir: string + scriptInjections?: Record + sshTunnelPrivateKey: string | Buffer + allowedSshHostKeys: Buffer + cwd: string + version: string + envId: EnvId + expectedServiceUrls: { name: string; port: number; url: string }[] + projectName: string + modelFilter: (userModel: ComposeModel) => Promise +}) => { + const projectLocalDataDir = path.join(dataDir, projectName) + await rimraf(projectLocalDataDir) + + const createCopiedFile = createCopiedFileInDataDir({ projectLocalDataDir }) + + const remoteModel = await remoteComposeModel({ + debug, + userSpecifiedProjectName, + userSpecifiedServices, + composeFiles, + log, + cwd, + expectedServiceUrls, + projectName, + modelFilter, + agentSettings: { + allowedSshHostKeys: hostKey, + sshTunnelPrivateKey, + userAndGroup, + createCopiedFile, + envId, + tunnelOpts, + version, + machineStatusCommand, + scriptInjections, + }, + }) + + return { ...remoteModel, projectLocalDataDir, createCopiedFile } +} + +export default composeModel diff --git a/packages/core/src/commands/up.ts b/packages/core/src/commands/up.ts new file mode 100644 index 00000000..274ad377 --- /dev/null +++ b/packages/core/src/commands/up.ts @@ -0,0 +1,163 @@ +import { MachineStatusCommand, ScriptInjection } from '@preevy/common' +import yaml from 'yaml' +import { TunnelOpts } from '../ssh' +import { ComposeModel, composeModelFilename, localComposeClient } from '../compose' +import { dockerEnvContext } from '../docker' +import { MachineConnection } from '../driver' +import { remoteProjectDir } from '../remote-files' +import { Logger } from '../log' +import { FileToCopy, uploadWithSpinner } from '../upload-files' +import { EnvId } from '../env-id' +import { BuildSpec } from '../build' +import modelCommand from './model' +import buildCommand from './build' +import { CommandExecuter } from '../command-executer' +import { telemetryEmitter } from '../telemetry' +import { measureTime } from '../timing' + +const uploadFiles = async ({ + log, + filesToCopy, + exec, + skipUnchangedFiles, + remoteDir, +}: { + log: Logger + filesToCopy: FileToCopy[] + exec: CommandExecuter + skipUnchangedFiles: boolean + remoteDir: string +}) => { + await exec(`mkdir -p "${remoteDir}"`) + + log.debug('Files to copy', filesToCopy) + + await uploadWithSpinner(exec, remoteDir, filesToCopy, skipUnchangedFiles) +} + +const up = async ({ + debug, + machineStatusCommand, + userAndGroup, + dockerPlatform, + connection, + tunnelOpts, + userSpecifiedProjectName, + userSpecifiedServices, + scriptInjections, + composeFiles, + log, + dataDir, + allowedSshHostKeys, + sshTunnelPrivateKey, + cwd, + skipUnchangedFiles, + version, + envId, + expectedServiceUrls, + projectName, + buildSpec, + modelFilter, +}: { + debug: boolean + machineStatusCommand?: MachineStatusCommand + userAndGroup: [string, string] + dockerPlatform: string + connection: Pick + tunnelOpts: TunnelOpts + userSpecifiedProjectName: string | undefined + userSpecifiedServices: string[] + composeFiles: string[] + log: Logger + dataDir: string + scriptInjections?: Record + sshTunnelPrivateKey: string | Buffer + allowedSshHostKeys: Buffer + cwd: string + skipUnchangedFiles: boolean + version: string + envId: EnvId + expectedServiceUrls: { name: string; port: number; url: string }[] + projectName: string + buildSpec?: BuildSpec + modelFilter: (userModel: ComposeModel) => Promise +}) => { + const remoteDir = remoteProjectDir(projectName) + + const { + 
model, + filesToCopy, + projectLocalDataDir, + createCopiedFile, + } = await modelCommand({ + debug, + log, + machineStatusCommand, + userAndGroup, + cwd, + tunnelOpts, + userSpecifiedProjectName, + userSpecifiedServices, + scriptInjections, + version, + envId, + allowedSshHostKeys, + composeFiles, + dataDir, + expectedServiceUrls, + projectName, + sshTunnelPrivateKey, + modelFilter, + }) + + log.debug('build spec: %j', buildSpec ?? 'none') + + let composeModel = model + + if (buildSpec) { + await using dockerContext = await dockerEnvContext({ connection, log }) + + composeModel = (await buildCommand({ + log, + buildSpec, + cwd, + composeModel, + projectLocalDataDir, + machineDockerPlatform: dockerPlatform, + env: dockerContext.env, + })).deployModel + } + + const modelStr = yaml.stringify(composeModel) + log.debug('model', modelStr) + const composeFilePath = await createCopiedFile(composeModelFilename, modelStr) + filesToCopy.push(composeFilePath) + + await uploadFiles({ log, filesToCopy, exec: connection.exec, skipUnchangedFiles, remoteDir }) + + const compose = localComposeClient({ + composeFiles: [composeFilePath.local], + projectDirectory: cwd, + }) + + const composeArgs = [ + ...debug ? ['--verbose'] : [], + 'up', '-d', '--remove-orphans', '--no-build', + ] + + log.info(`Running: docker compose up ${composeArgs.join(' ')}`) + + await using dockerContext = await dockerEnvContext({ connection, log }) + + const { elapsedTimeSec } = await measureTime(() => compose.spawnPromise(composeArgs, { stdio: 'inherit', env: dockerContext.env })) + telemetryEmitter().capture('deploy success', { + elapsed_sec: elapsedTimeSec, + with_build: Boolean(buildSpec), + has_registry: Boolean(buildSpec?.registry), + }) + log.info(`Deploy step done in ${elapsedTimeSec.toLocaleString(undefined, { maximumFractionDigits: 2 })}s`) + + return { composeModel, projectLocalDataDir } +} + +export default up diff --git a/packages/core/src/commands/up/index.ts b/packages/core/src/commands/up/index.ts deleted file mode 100644 index 6742b973..00000000 --- a/packages/core/src/commands/up/index.ts +++ /dev/null @@ -1,183 +0,0 @@ -import { COMPOSE_TUNNEL_AGENT_SERVICE_NAME, ScriptInjection, formatPublicKey, readOrUndefined } from '@preevy/common' -import fs from 'fs' -import path from 'path' -import { rimraf } from 'rimraf' -import yaml from 'yaml' -import { TunnelOpts } from '../../ssh' -import { composeModelFilename, fixModelForRemote, localComposeClient, addScriptInjectionsToModel } from '../../compose' -import { ensureCustomizedMachine } from './machine' -import { wrapWithDockerSocket } from '../../docker' -import { addComposeTunnelAgentService } from '../../compose-tunnel-agent-client' -import { MachineCreationDriver, MachineDriver, MachineBase } from '../../driver' -import { remoteProjectDir } from '../../remote-files' -import { Logger } from '../../log' -import { FileToCopy, uploadWithSpinner } from '../../upload-files' -import { envMetadata } from '../../env-metadata' -import { EnvId } from '../../env-id' - -const createCopiedFileInDataDir = ( - { projectLocalDataDir, filesToCopy } : { - projectLocalDataDir: string - filesToCopy: FileToCopy[] - } -) => async ( - filename: string, - content: string | Buffer -): Promise<{ local: string; remote: string }> => { - const local = path.join(projectLocalDataDir, filename) - const result = { local, remote: filename } - if (await readOrUndefined(local) === Buffer.from(content)) { - return result - } - await fs.promises.mkdir(path.dirname(local), { recursive: true }) - await 
fs.promises.writeFile(local, content, { flag: 'w' }) - filesToCopy.push(result) - return result -} - -const calcComposeUpArgs = ({ userSpecifiedServices, debug, cwd } : { - userSpecifiedServices: string[] - debug: boolean - cwd: string -}) => { - const upServices = userSpecifiedServices.length - ? userSpecifiedServices.concat(COMPOSE_TUNNEL_AGENT_SERVICE_NAME) - : [] - - return [ - ...debug ? ['--verbose'] : [], - '--project-directory', cwd, - 'up', '-d', '--remove-orphans', '--build', - ...upServices, - ] -} - -const serviceLinkEnvVars = ( - expectedServiceUrls: { name: string; port: number; url: string }[], -) => Object.fromEntries( - expectedServiceUrls - .map(({ name, port, url }) => [`PREEVY_BASE_URI_${name.replace(/[^a-zA-Z0-9_]/g, '_')}_${port}`.toUpperCase(), url]) -) - -const up = async ({ - debug, - machineDriver, - machineDriverName, - machineCreationDriver, - tunnelOpts, - userSpecifiedProjectName, - userSpecifiedServices, - scriptInjections, - composeFiles, - log, - dataDir, - allowedSshHostKeys: hostKey, - sshTunnelPrivateKey, - cwd, - skipUnchangedFiles, - version, - envId, - expectedServiceUrls, - projectName, -}: { - debug: boolean - machineDriver: MachineDriver - machineDriverName: string - machineCreationDriver: MachineCreationDriver - tunnelOpts: TunnelOpts - userSpecifiedProjectName: string | undefined - userSpecifiedServices: string[] - composeFiles: string[] - log: Logger - dataDir: string - scriptInjections?: Record - sshTunnelPrivateKey: string | Buffer - allowedSshHostKeys: Buffer - cwd: string - skipUnchangedFiles: boolean - version: string - envId: EnvId - expectedServiceUrls: { name: string; port: number; url: string }[] - projectName: string -}): Promise<{ machine: MachineBase }> => { - const remoteDir = remoteProjectDir(projectName) - - log.debug(`Using compose files: ${composeFiles.join(', ')}`) - - const composeClientWithInjectedArgs = localComposeClient({ - composeFiles, - env: serviceLinkEnvVars(expectedServiceUrls), - projectName: userSpecifiedProjectName, - }) - - const { model: fixedModel, filesToCopy } = await fixModelForRemote( - { cwd, remoteBaseDir: remoteDir }, - await composeClientWithInjectedArgs.getModel() - ) - - const projectLocalDataDir = path.join(dataDir, projectName) - await rimraf(projectLocalDataDir) - - const createCopiedFile = createCopiedFileInDataDir({ projectLocalDataDir, filesToCopy }) - const [sshPrivateKeyFile, knownServerPublicKey] = await Promise.all([ - createCopiedFile('tunnel_client_private_key', sshTunnelPrivateKey), - createCopiedFile('tunnel_server_public_key', formatPublicKey(hostKey)), - ]) - - const { machine, connection, userAndGroup } = await ensureCustomizedMachine({ - machineDriver, machineCreationDriver, machineDriverName, envId, log, debug, - }) - - let remoteModel = addComposeTunnelAgentService({ - envId, - debug, - tunnelOpts, - sshPrivateKeyPath: path.posix.join(remoteDir, sshPrivateKeyFile.remote), - knownServerPublicKeyPath: path.posix.join(remoteDir, knownServerPublicKey.remote), - user: userAndGroup.join(':'), - machineStatusCommand: await machineDriver.machineStatusCommand(machine), - envMetadata: await envMetadata({ envId, version }), - composeModelPath: path.posix.join(remoteDir, composeModelFilename), - privateMode: false, - defaultAccess: 'public', - composeProject: projectName, - }, fixedModel) - - if (scriptInjections) { - remoteModel = addScriptInjectionsToModel( - remoteModel, - serviceName => (serviceName !== COMPOSE_TUNNEL_AGENT_SERVICE_NAME ? 
scriptInjections : undefined), - ) - } - - try { - const { exec } = connection - - const modelStr = yaml.stringify(remoteModel) - log.debug('model', modelStr) - const composeFilePath = await createCopiedFile(composeModelFilename, modelStr) - - await exec(`mkdir -p "${remoteDir}"`) - - log.debug('Files to copy', filesToCopy) - - await uploadWithSpinner(exec, remoteDir, filesToCopy, skipUnchangedFiles) - - const compose = localComposeClient({ - composeFiles: [composeFilePath.local], - projectName: userSpecifiedProjectName, - }) - const composeArgs = calcComposeUpArgs({ userSpecifiedServices, debug, cwd }) - - const withDockerSocket = wrapWithDockerSocket({ connection, log }) - - log.info(`Running: docker compose up ${composeArgs.join(' ')}`) - await withDockerSocket(() => compose.spawnPromise(composeArgs, { stdio: 'inherit' })) - } finally { - await connection.close() - } - - return { machine } -} - -export default up diff --git a/packages/core/src/compose-tunnel-agent-client.ts b/packages/core/src/compose-tunnel-agent-client.ts index 47fb8054..426d0c8c 100644 --- a/packages/core/src/compose-tunnel-agent-client.ts +++ b/packages/core/src/compose-tunnel-agent-client.ts @@ -3,7 +3,7 @@ import fetch from 'node-fetch' import retry from 'p-retry' import util from 'util' import { mapValues, merge } from 'lodash' -import { COMPOSE_TUNNEL_AGENT_PORT, COMPOSE_TUNNEL_AGENT_SERVICE_LABELS, COMPOSE_TUNNEL_AGENT_SERVICE_NAME, MachineStatusCommand, dateReplacer } from '@preevy/common' +import { COMPOSE_TUNNEL_AGENT_PORT, COMPOSE_TUNNEL_AGENT_SERVICE_LABELS, COMPOSE_TUNNEL_AGENT_SERVICE_NAME, MachineStatusCommand, ScriptInjection, dateReplacer } from '@preevy/common' import { ComposeModel, ComposeService, composeModelFilename } from './compose/model' import { TunnelOpts } from './ssh/url' import { Tunnel } from './tunneling' @@ -12,6 +12,7 @@ import { EnvMetadata, driverMetadataFilename } from './env-metadata' import { REMOTE_DIR_BASE } from './remote-files' import { isPacked, pkgSnapshotDir } from './pkg' import { EnvId } from './env-id' +import { addScriptInjectionsToServices } from './compose/script-injection' const COMPOSE_TUNNEL_AGENT_DIR = path.join(path.dirname(require.resolve('@preevy/compose-tunnel-agent')), '..') @@ -62,6 +63,7 @@ export const addComposeTunnelAgentService = ( profileThumbprint, privateMode, defaultAccess, + scriptInjections, }: { tunnelOpts: TunnelOpts sshPrivateKeyPath: string @@ -76,12 +78,13 @@ export const addComposeTunnelAgentService = ( profileThumbprint?: string privateMode: boolean defaultAccess: 'private' | 'public' + scriptInjections?: (serviceName: string, serviceDef: ComposeService) => Record | undefined }, model: ComposeModel, ): ComposeModel => ({ ...model, services: { - ...model.services, + ...scriptInjections ? addScriptInjectionsToServices(model.services, scriptInjections) : model.services, [COMPOSE_TUNNEL_AGENT_SERVICE_NAME]: merge(baseDockerProxyService(), { restart: 'always', diff --git a/packages/core/src/compose/client.ts b/packages/core/src/compose/client.ts index fe89e71b..8706dbac 100644 --- a/packages/core/src/compose/client.ts +++ b/packages/core/src/compose/client.ts @@ -37,11 +37,13 @@ export const getExposedTcpServicePorts = (model: Pick) .map(({ target }) => target), })) -const composeFileArgs = ( - composeFiles: string[] | Buffer, - projectName?: string, -) => [ +const composeFileArgs = ({ composeFiles, projectName, projectDirectory }: { + composeFiles: string[] | Buffer + projectName?: string + projectDirectory?: string +}) => [ ...(projectName ? 
['-p', projectName] : []), + ...(projectDirectory ? ['--project-directory', projectDirectory] : []), ...(Buffer.isBuffer(composeFiles) ? ['-f', '-'] : composeFiles.flatMap(file => ['-f', file])), ] @@ -61,11 +63,11 @@ const composeClient = ( throw e }) - const getModel = async () => yaml.parse(await execComposeCommand(['convert'])) as ComposeModel + const getModel = async (services: string[] = []) => yaml.parse(await execComposeCommand(['convert', ...services])) as ComposeModel return { getModel, - getModelOrError: async () => await getModel().catch(e => { + getModelOrError: async (services: string[] = []) => await getModel(services).catch(e => { if (e instanceof DockerIsNotInstalled || (e instanceof ProcessError && (e.code === DOCKER_COMPOSE_NO_CONFIGURATION_FILE_ERROR_CODE))) { return new LoadComposeFileError(e) @@ -85,10 +87,11 @@ export type ComposeClient = ReturnType type ParametersExceptFirst = F extends (arg0: any, ...rest: infer R) => any ? R : never; export const localComposeClient = ( - { composeFiles, projectName, env }: { + { composeFiles, projectName, env, projectDirectory }: { composeFiles: string[] | Buffer projectName?: string env?: NodeJS.ProcessEnv + projectDirectory?: string }, ) => { const insertStdin = (stdio: StdioOptions | undefined) => { @@ -104,7 +107,7 @@ export const localComposeClient = ( return [null, null, null] } - const fileArgs = composeFileArgs(composeFiles, projectName) + const fileArgs = composeFileArgs({ composeFiles, projectName, projectDirectory }) const spawnComposeArgs = (...[args, opts]: ParametersExceptFirst): Parameters => [ 'docker', diff --git a/packages/core/src/compose/model.ts b/packages/core/src/compose/model.ts index 57c0791e..b9553e46 100644 --- a/packages/core/src/compose/model.ts +++ b/packages/core/src/compose/model.ts @@ -1,9 +1,3 @@ -import { asyncMap, asyncToArray } from 'iter-tools-es' -import { mapValues } from 'lodash' -import path from 'path' -import { asyncMapValues } from '../async' -import { lstatOrUndefined } from '../files' -import { FileToCopy } from '../upload-files' import { PreevyConfig } from '../config' export type ComposeSecretOrConfig = { @@ -27,10 +21,15 @@ export type ComposeBindVolume = { export type ComposeVolume = { type: 'volume' | 'tmpfs' | 'npipe' } | ComposeBindVolume -type ComposeBuild = { +export type ComposeBuild = { context: string target?: string dockerfile?: string + tags?: string[] + cache_from?: string[] + cache_to?: string[] + platforms?: string[] + no_cache?: boolean } type ComposePort = { @@ -52,6 +51,8 @@ export type ComposeService = { environment?: Record | EnvString[] user?: string labels?: Record + image?: string + platform?: string } export type ComposeModel = { @@ -64,95 +65,4 @@ export type ComposeModel = { 'x-preevy'?: PreevyConfig } -const volumeSkipList = [ - /^\/var\/log(\/|$)/, - /^\/$/, -] - -const toPosix = (x:string) => x.split(path.sep).join(path.posix.sep) - -export const fixModelForRemote = async ( - { skipServices = [], cwd, remoteBaseDir }: { - skipServices?: string[] - cwd: string - remoteBaseDir: string - }, - model: ComposeModel, -): Promise<{ model: Required>; filesToCopy: FileToCopy[] }> => { - const filesToCopy: FileToCopy[] = [] - - const remotePath = (absolutePath: string) => { - if (!path.isAbsolute(absolutePath)) { - throw new Error(`expected absolute path: "${absolutePath}"`) - } - const relativePath = toPosix(path.relative(cwd, absolutePath)) - - return relativePath.startsWith('..') - ? 
path.posix.join('absolute', absolutePath) - : path.posix.join('relative', relativePath) - } - - const overrideSecretsOrConfigs = ( - c?: Record, - ) => mapValues(c ?? {}, secretOrConfig => { - const remote = remotePath(secretOrConfig.file) - filesToCopy.push({ local: secretOrConfig.file, remote }) - return { ...secretOrConfig, file: path.posix.join(remoteBaseDir, remote) } - }) - - const overrideSecrets = overrideSecretsOrConfigs(model.secrets) - const overrideConfigs = overrideSecretsOrConfigs(model.configs) - - const services = model.services ?? {} - - const overrideServices = await asyncMapValues(services, async (service, serviceName) => { - if (skipServices.includes(serviceName)) { - return service - } - - return ({ - ...service, - - volumes: service.volumes && await asyncToArray(asyncMap(async volume => { - if (volume.type === 'volume') { - return volume - } - - if (volume.type !== 'bind') { - throw new Error(`Unsupported volume type: ${volume.type} in service ${serviceName}`) - } - if (volumeSkipList.some(re => re.test(volume.source))) { - return volume - } - - const remote = remotePath(volume.source) - const stats = await lstatOrUndefined(volume.source) - - if (stats) { - if (!stats.isDirectory() && !stats.isFile() && !stats.isSymbolicLink()) { - return volume - } - - // ignore non-existing files like docker and compose do, - // they will be created as directories in the container - filesToCopy.push({ local: volume.source, remote }) - } - - return { ...volume, source: path.posix.join(remoteBaseDir, remote) } - }, service.volumes)), - }) - }) - - return { - model: { - ...model, - secrets: overrideSecrets, - configs: overrideConfigs, - services: overrideServices, - networks: model.networks ?? {}, - }, - filesToCopy, - } -} - export const composeModelFilename = 'docker-compose.yaml' diff --git a/packages/core/src/compose/remote.ts b/packages/core/src/compose/remote.ts index 12283555..bf14980d 100644 --- a/packages/core/src/compose/remote.ts +++ b/packages/core/src/compose/remote.ts @@ -1,9 +1,218 @@ import yaml from 'yaml' +import path from 'path' +import { mapValues } from 'lodash' +import { asyncMap, asyncToArray } from 'iter-tools-es' +import { COMPOSE_TUNNEL_AGENT_SERVICE_NAME, MachineStatusCommand, ScriptInjection, formatPublicKey } from '@preevy/common' import { MachineConnection } from '../driver' -import { ComposeModel, composeModelFilename } from './model' -import { REMOTE_DIR_BASE } from '../remote-files' +import { ComposeModel, ComposeSecretOrConfig, composeModelFilename } from './model' +import { REMOTE_DIR_BASE, remoteProjectDir } from '../remote-files' +import { TunnelOpts } from '../ssh' +import { addComposeTunnelAgentService } from '../compose-tunnel-agent-client' +import { Logger } from '../log' +import { FileToCopy } from '../upload-files' +import { envMetadata } from '../env-metadata' +import { EnvId } from '../env-id' +import { asyncMapValues } from '../async' +import { lstatOrUndefined } from '../files' +import { localComposeClient } from './client' -export const remoteUserModel = async (connection: MachineConnection) => { +export const fetchRemoteUserModel = async (connection: MachineConnection) => { const userModelStr = (await connection.exec(`cat ${REMOTE_DIR_BASE}/projects/*/${composeModelFilename}`)).stdout return yaml.parse(userModelStr) as ComposeModel } + +const serviceLinkEnvVars = ( + expectedServiceUrls: { name: string; port: number; url: string }[], +) => Object.fromEntries( + expectedServiceUrls + .map(({ name, port, url }) => 
[`PREEVY_BASE_URI_${name.replace(/[^a-zA-Z0-9_]/g, '_')}_${port}`.toUpperCase(), url]) +) + +const volumeSkipList = [ + /^\/var\/log(\/|$)/, + /^\/$/, +] + +const toPosix = (x:string) => x.split(path.sep).join(path.posix.sep) + +const fixModelForRemote = async ( + { skipServices = [], cwd, remoteBaseDir }: { + skipServices?: string[] + cwd: string + remoteBaseDir: string + }, + model: ComposeModel, +): Promise<{ model: Required>; filesToCopy: FileToCopy[] }> => { + const filesToCopy: FileToCopy[] = [] + + const remotePath = (absolutePath: string) => { + if (!path.isAbsolute(absolutePath)) { + throw new Error(`expected absolute path: "${absolutePath}"`) + } + const relativePath = toPosix(path.relative(cwd, absolutePath)) + + return relativePath.startsWith('..') + ? path.posix.join('absolute', absolutePath) + : path.posix.join('relative', relativePath) + } + + const overrideSecretsOrConfigs = ( + c?: Record, + ) => mapValues(c ?? {}, secretOrConfig => { + const remote = remotePath(secretOrConfig.file) + filesToCopy.push({ local: secretOrConfig.file, remote }) + return { ...secretOrConfig, file: path.posix.join(remoteBaseDir, remote) } + }) + + const overrideSecrets = overrideSecretsOrConfigs(model.secrets) + const overrideConfigs = overrideSecretsOrConfigs(model.configs) + + const services = model.services ?? {} + + const overrideServices = await asyncMapValues(services, async (service, serviceName) => { + if (skipServices.includes(serviceName)) { + return service + } + + return ({ + ...service, + + volumes: service.volumes && await asyncToArray(asyncMap(async volume => { + if (volume.type === 'volume') { + return volume + } + + if (volume.type !== 'bind') { + throw new Error(`Unsupported volume type: ${volume.type} in service ${serviceName}`) + } + if (volumeSkipList.some(re => re.test(volume.source))) { + return volume + } + + const remote = remotePath(volume.source) + const stats = await lstatOrUndefined(volume.source) + + if (stats) { + if (!stats.isDirectory() && !stats.isFile() && !stats.isSymbolicLink()) { + return volume + } + + // ignore non-existing files like docker and compose do, + // they will be created as directories in the container + filesToCopy.push({ local: volume.source, remote }) + } + + return { ...volume, source: path.posix.join(remoteBaseDir, remote) } + }, service.volumes)), + }) + }) + + return { + model: { + ...model, + secrets: overrideSecrets, + configs: overrideConfigs, + services: overrideServices, + networks: model.networks ?? 
{}, + }, + filesToCopy, + } +} + +type AgentSettings = { + version: string + envId: EnvId + tunnelOpts: TunnelOpts + sshTunnelPrivateKey: string | Buffer + allowedSshHostKeys: Buffer + userAndGroup: [string, string] + machineStatusCommand?: MachineStatusCommand + scriptInjections?: Record + createCopiedFile: (filename: string, content: string | Buffer) => Promise +} + +export const remoteComposeModel = async ({ + debug, + userSpecifiedProjectName, + userSpecifiedServices, + composeFiles, + log, + cwd, + expectedServiceUrls, + projectName, + agentSettings, + modelFilter, +}: { + debug: boolean + userSpecifiedProjectName: string | undefined + userSpecifiedServices: string[] + composeFiles: string[] + log: Logger + cwd: string + expectedServiceUrls: { name: string; port: number; url: string }[] + projectName: string + agentSettings?: AgentSettings + modelFilter: (userModel: ComposeModel) => Promise +}) => { + const remoteDir = remoteProjectDir(projectName) + + log.debug(`Using compose files: ${composeFiles.join(', ')}`) + + const linkEnvVars = serviceLinkEnvVars(expectedServiceUrls) + + const composeClientWithInjectedArgs = localComposeClient({ + composeFiles, + env: linkEnvVars, + projectName: userSpecifiedProjectName, + projectDirectory: cwd, + }) + + const services = userSpecifiedServices.length + ? [...userSpecifiedServices].concat(COMPOSE_TUNNEL_AGENT_SERVICE_NAME) + : [] + + const { model: fixedModel, filesToCopy } = await fixModelForRemote( + { cwd, remoteBaseDir: remoteDir }, + await modelFilter(await composeClientWithInjectedArgs.getModel(services)), + ) + + let model: ComposeModel = fixedModel + if (agentSettings) { + const { + envId, + machineStatusCommand, + userAndGroup, + scriptInjections, + tunnelOpts, + version, + sshTunnelPrivateKey, + allowedSshHostKeys, + createCopiedFile, + } = agentSettings + + const [sshPrivateKeyFile, knownServerPublicKey] = await Promise.all([ + createCopiedFile('tunnel_client_private_key', sshTunnelPrivateKey), + createCopiedFile('tunnel_server_public_key', formatPublicKey(allowedSshHostKeys)), + ]) + + model = addComposeTunnelAgentService({ + envId, + debug, + tunnelOpts, + sshPrivateKeyPath: path.posix.join(remoteDir, sshPrivateKeyFile.remote), + knownServerPublicKeyPath: path.posix.join(remoteDir, knownServerPublicKey.remote), + user: userAndGroup.join(':'), + machineStatusCommand, + envMetadata: await envMetadata({ envId, version }), + composeModelPath: path.posix.join(remoteDir, composeModelFilename), + privateMode: false, + defaultAccess: 'public', + composeProject: projectName, + scriptInjections: scriptInjections && (() => scriptInjections), + }, fixedModel) + + filesToCopy.push(sshPrivateKeyFile, knownServerPublicKey) + } + + return { model, filesToCopy, linkEnvVars } +} diff --git a/packages/core/src/compose/script-injection.test.ts b/packages/core/src/compose/script-injection.test.ts index 72bd09d4..b5cc7e73 100644 --- a/packages/core/src/compose/script-injection.test.ts +++ b/packages/core/src/compose/script-injection.test.ts @@ -1,24 +1,21 @@ import { describe, expect, jest, beforeEach, it } from '@jest/globals' import { ScriptInjection } from '@preevy/common' import { ComposeModel } from './model' -import { addScriptInjectionsToModel } from './script-injection' +import { addScriptInjectionsToServices } from './script-injection' describe('addScriptInjectionsToModel', () => { - const model: ComposeModel = Object.freeze({ - name: 'my-app', - services: { - frontend1: {}, - frontend2: { - labels: { - other: 'value', - }, + const model: 
ComposeModel['services'] = Object.freeze({ + frontend1: {}, + frontend2: { + labels: { + other: 'value', }, - frontend3: {}, }, + frontend3: {}, }) let callback: jest.MockedFunction<(name: string) => Record | undefined> - let newModel: ComposeModel + let newModel: ComposeModel['services'] const injection: ScriptInjection = { src: 'https://mydomain.com/myscript.ts', @@ -28,7 +25,7 @@ describe('addScriptInjectionsToModel', () => { beforeEach(() => { callback = jest.fn(name => (['frontend1', 'frontend2'].includes(name) ? ({ test: injection }) : undefined)) - newModel = addScriptInjectionsToModel(model, callback) + newModel = addScriptInjectionsToServices(model, callback) }) it('injects the script for the first two services', () => { @@ -37,12 +34,12 @@ describe('addScriptInjectionsToModel', () => { 'preevy.inject_script.test.async': 'true', 'preevy.inject_script.test.path_regex': '.*', } - expect(newModel.services?.frontend1?.labels).toMatchObject(expectedLabels) - expect(newModel.services?.frontend2?.labels).toMatchObject({ other: 'value', ...expectedLabels }) + expect(newModel?.frontend1?.labels).toMatchObject(expectedLabels) + expect(newModel?.frontend2?.labels).toMatchObject({ other: 'value', ...expectedLabels }) }) it('does not inject the script for the last service', () => { - expect(newModel.services?.frontend3?.labels).toMatchObject({}) + expect(newModel?.frontend3?.labels).toMatchObject({}) }) it('calls the factory correctly', () => { diff --git a/packages/core/src/compose/script-injection.ts b/packages/core/src/compose/script-injection.ts index 0c92f1ee..9de6177d 100644 --- a/packages/core/src/compose/script-injection.ts +++ b/packages/core/src/compose/script-injection.ts @@ -13,10 +13,7 @@ const addScriptInjectionsToService = ( }, }) -export const addScriptInjectionsToModel = ( - model: ComposeModel, +export const addScriptInjectionsToServices = ( + services: ComposeModel['services'], factory: (serviceName: string, serviceDef: ComposeService) => Record | undefined, -): ComposeModel => ({ - ...model, - services: mapValues(model.services ?? {}, (def, name) => addScriptInjectionsToService(def, factory(name, def) ?? {})), -}) +): ComposeModel['services'] => mapValues(services, (def, name) => addScriptInjectionsToService(def, factory(name, def) ?? 
{})) diff --git a/packages/core/src/compose/service-links.ts b/packages/core/src/compose/service-links.ts new file mode 100644 index 00000000..fb9f9d60 --- /dev/null +++ b/packages/core/src/compose/service-links.ts @@ -0,0 +1,6 @@ +export const serviceLinkEnvVars = ( + expectedServiceUrls: { name: string; port: number; url: string }[], +) => Object.fromEntries( + expectedServiceUrls + .map(({ name, port, url }) => [`PREEVY_BASE_URI_${name.replace(/[^a-zA-Z0-9_]/g, '_')}_${port}`.toUpperCase(), url]) +) diff --git a/packages/core/src/docker.ts b/packages/core/src/docker.ts index d5acb4b6..1848a3d7 100644 --- a/packages/core/src/docker.ts +++ b/packages/core/src/docker.ts @@ -1,10 +1,11 @@ +import { omitBy } from 'lodash' import { Logger } from './log' import { MachineConnection, ForwardSocket } from './driver' import { withSpinner } from './spinner' -export type FuncWrapper = ( - f: () => Promise, -) => Promise +// export type FuncWrapper = ( +// f: (...args: Args) => Promise, +// ) => Promise const dockerHost = (s: string | ForwardSocket['address']) => ( typeof s === 'string' @@ -12,15 +13,14 @@ const dockerHost = (s: string | ForwardSocket['address']) => ( : `tcp://${s.host}:${s.port}` ) -export const wrapWithDockerSocket = ( - { connection, log }: { - connection: MachineConnection +export const dockerEnvContext = async ( + { connection, log, env = process.env }: { + connection: Pick log: Logger + env?: Record }, -): FuncWrapper => async ( - f: () => Promise, -): Promise => { - const { address, close } = await withSpinner( +): Promise }> => { + const { address, [Symbol.asyncDispose]: dispose } = await withSpinner( () => connection.dockerSocket(), { text: 'Connecting to remote docker socket...', successText: 'Connected to remote docker socket' }, ) @@ -32,7 +32,34 @@ export const wrapWithDockerSocket = ( delete process.env[k] }) - process.env.DOCKER_HOST = dockerHost(address) - - return await f().finally(close) + return { + env: { + ...omitBy(env, (_, k) => k.startsWith('DOCKER_')), + DOCKER_HOST: dockerHost(address), + }, + [Symbol.asyncDispose]: dispose, + } } + +// export const wrapWithDockerSocket = ( +// { connection, log }: { +// connection: Pick +// log: Logger +// }, +// ) => async ( +// f: (env: Record) => Promise, +// ): Promise => { +// const { address, close } = await withSpinner( +// () => connection.dockerSocket(), +// { text: 'Connecting to remote docker socket...', successText: 'Connected to remote docker socket' }, +// ) + +// log.debug(`Local socket: ${JSON.stringify(address)}`) + +// Object.keys(process.env).filter(k => k !== 'DOCKER_HOST' && k.startsWith('DOCKER_')).forEach(k => { +// log.warn(`deleting conflicting env var ${k}`) +// delete process.env[k] +// }) + +// return await f({ DOCKER_HOST: dockerHost(address) }).finally(close) +// } diff --git a/packages/core/src/driver/driver.ts b/packages/core/src/driver/driver.ts index fd07fe51..7f469a1a 100644 --- a/packages/core/src/driver/driver.ts +++ b/packages/core/src/driver/driver.ts @@ -1,31 +1,20 @@ -import { AddressInfo } from 'net' import { MachineStatusCommand } from '@preevy/common' import { PartialStdioOptions } from '../child-process' import { CommandExecuter } from '../command-executer' import { Profile } from '../profile' -import { MachineBase, PartialMachine, Resource, SpecDiffItem } from './machine' +import { MachineBase, PartialMachine, Resource, SpecDiffItem } from './machine-model' import { Store } from '../store' import { Logger } from '../log' -import { EnvMachineMetadata } from '../env-metadata' 
-export type ForwardOutStreamLocal = { - localSocket: string | AddressInfo - close: () => Promise -} - -export type ForwardSocket = { +export type ForwardSocket = AsyncDisposable & { address: { host: string; port: number } - close: () => Promise } -export type MachineConnection = { +export type MachineConnection = Disposable & { exec: CommandExecuter dockerSocket: () => Promise - close: () => Promise } -export type MachineMetadata = Omit - export type MachineDriver< Machine extends MachineBase = MachineBase, ResourceType extends string = string diff --git a/packages/core/src/driver/index.ts b/packages/core/src/driver/index.ts index 5d7b8817..6a9373c0 100644 --- a/packages/core/src/driver/index.ts +++ b/packages/core/src/driver/index.ts @@ -6,7 +6,8 @@ export { Resource, MachineResource, machineResourceType, -} from './machine' +} from './machine-model' +export * from './machine-operations' export { SshMachine, sshDriver, getStoredKey, getStoredKeyOrUndefined } from './ssh' export { machineStatusNodeExporterCommand } from './machine-status-node-exporter' export { diff --git a/packages/core/src/driver/machine.ts b/packages/core/src/driver/machine-model.ts similarity index 100% rename from packages/core/src/driver/machine.ts rename to packages/core/src/driver/machine-model.ts diff --git a/packages/core/src/commands/up/machine.ts b/packages/core/src/driver/machine-operations.ts similarity index 81% rename from packages/core/src/commands/up/machine.ts rename to packages/core/src/driver/machine-operations.ts index 6b09a689..26e4f75c 100644 --- a/packages/core/src/commands/up/machine.ts +++ b/packages/core/src/driver/machine-operations.ts @@ -1,20 +1,21 @@ import { EOL } from 'os' import retry from 'p-retry' import { dateReplacer } from '@preevy/common' -import { withSpinner } from '../../spinner' -import { MachineCreationDriver, SpecDiffItem, MachineDriver, MachineConnection, MachineBase, isPartialMachine, machineResourceType } from '../../driver' -import { telemetryEmitter } from '../../telemetry' -import { Logger } from '../../log' -import { scriptExecuter } from '../../remote-script-executer' -import { EnvMetadata, driverMetadataFilename } from '../../env-metadata' -import { REMOTE_DIR_BASE } from '../../remote-files' +import { withSpinner } from '../spinner' +import { telemetryEmitter } from '../telemetry' +import { Logger } from '../log' +import { scriptExecuter } from '../remote-script-executer' +import { EnvMetadata, driverMetadataFilename } from '../env-metadata' +import { REMOTE_DIR_BASE } from '../remote-files' +import { MachineBase, SpecDiffItem, isPartialMachine, machineResourceType } from './machine-model' +import { MachineConnection, MachineCreationDriver, MachineDriver } from './driver' const machineDiffText = (diff: SpecDiffItem[]) => diff .map(({ name, old, new: n }) => `* ${name}: ${old} -> ${n}`).join(EOL) type Origin = 'existing' | 'new-from-snapshot' | 'new-from-scratch' -const ensureMachine = async ({ +const ensureBareMachine = async ({ machineDriver, machineCreationDriver, envId, @@ -98,12 +99,17 @@ const writeMetadata = async ( }) } -const getUserAndGroup = async (connection: MachineConnection) => ( +export const getUserAndGroup = async (connection: Pick) => ( await connection.exec('echo "$(id -u):$(stat -c %g /var/run/docker.sock)"') ).stdout .trim() .split(':') as [string, string] +export const getDockerPlatform = async (connection: Pick) => { + const arch = (await connection.exec('docker info -f "{{.Architecture}}"')).stdout.trim() + return arch === 'aarch64' ? 
'linux/arm64' : 'linux/amd64' +} + const customizeNewMachine = ({ log, debug, @@ -142,7 +148,7 @@ const customizeNewMachine = ({ retries: 5, onFailedAttempt: async err => { log.debug(`Failed to execute docker run hello-world: ${err}`) - await connection.close() + connection[Symbol.dispose]() connection = await machineDriver.connect(machine, { log, debug }) }, } @@ -150,6 +156,7 @@ const customizeNewMachine = ({ spinner.text = 'Finalizing...' const userAndGroup = await getUserAndGroup(connection) + const dockerPlatform = await getDockerPlatform(connection) await Promise.all([ writeMetadata(machine, machineDriverName, machineCreationDriver.metadata, connection, userAndGroup), @@ -160,10 +167,10 @@ const customizeNewMachine = ({ }), ]) - return { connection, userAndGroup, machine } + return { connection, userAndGroup, machine, dockerPlatform } } -export const ensureCustomizedMachine = async ({ +export const ensureMachine = async ({ machineDriver, machineCreationDriver, machineDriverName, @@ -177,10 +184,16 @@ export const ensureCustomizedMachine = async ({ envId: string log: Logger debug: boolean -}): Promise<{ machine: MachineBase; connection: MachineConnection; userAndGroup: [string, string] }> => { - const { machine, connection: connectionPromise, origin } = await ensureMachine( +}): Promise<{ + machine: MachineBase + connection: MachineConnection + userAndGroup: [string, string] + dockerPlatform: string +}> => { + const { machine, connection: connectionPromise, origin } = await ensureBareMachine( { machineDriver, machineCreationDriver, envId, log, debug }, ) + return await withSpinner(async spinner => { spinner.text = `Connecting to machine at ${machine.locationDescription}` const connection = await connectionPromise @@ -200,14 +213,16 @@ export const ensureCustomizedMachine = async ({ } const userAndGroup = await getUserAndGroup(connection) + const dockerPlatform = await getDockerPlatform(connection) + if (origin === 'new-from-snapshot') { spinner.text = 'Finalizing...' 
await writeMetadata(machine, machineDriverName, machineCreationDriver.metadata, connection, userAndGroup) } - return { machine, connection, userAndGroup } + return { machine, connection, userAndGroup, dockerPlatform } } catch (e) { - await connection.close() + connection[Symbol.dispose]() throw e } }, { opPrefix: 'Configuring machine', successText: 'Machine configured' }) diff --git a/packages/core/src/driver/ssh.ts b/packages/core/src/driver/ssh.ts index c33d9941..6d258386 100644 --- a/packages/core/src/driver/ssh.ts +++ b/packages/core/src/driver/ssh.ts @@ -9,7 +9,7 @@ import retry, { Options as RetryOptions } from 'p-retry' import { Store } from '../store' import { SshKeyPair, connectSshClient } from '../ssh' import { MachineConnection, MachineDriver } from './driver' -import { MachineBase } from './machine' +import { MachineBase } from './machine-model' import { sshKeysStore } from '../state' import { Logger } from '../log' @@ -57,13 +57,13 @@ export const sshDriver = ( ) return { - close: async () => connection.close(), + [Symbol.dispose]: () => connection[Symbol.dispose](), exec: connection.exec, dockerSocket: async () => { const host = '0.0.0.0' const forward = await connection.forwardOutStreamLocal({ port: 0, host }, '/var/run/docker.sock') return { - close: forward.close, + [Symbol.asyncDispose]: forward[Symbol.asyncDispose], address: { host, port: (forward.localSocket as AddressInfo).port }, } }, diff --git a/packages/core/src/env-id.ts b/packages/core/src/env-id.ts index d9aba845..4f87561d 100644 --- a/packages/core/src/env-id.ts +++ b/packages/core/src/env-id.ts @@ -1,6 +1,6 @@ import { detectCiProvider } from './ci-providers' import { gitContext } from './git' -import { ComposeModel } from './compose' +import { ComposeModel } from './compose/model' import { Logger } from './log' export type EnvId = string & { diff --git a/packages/core/src/git.ts b/packages/core/src/git.ts index e2390c43..65bd64d0 100644 --- a/packages/core/src/git.ts +++ b/packages/core/src/git.ts @@ -5,7 +5,9 @@ export function gitContext(cwd: string = process.cwd()) { const branchName = async () => await execGit('rev-parse --abbrev-ref HEAD') .catch(() => undefined) - const head = async () => await execGit('rev-parse HEAD') + const head = async ( + { short }: { short?: boolean } = { short: false }, + ) => await execGit(`rev-parse ${short ? 
'--short ' : ''}HEAD`) .catch(() => undefined) const author = async (commit?: string) => { diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index 0ff9005f..0efedb33 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -18,11 +18,12 @@ export { getStoredKeyOrUndefined as getStoredSshKeyOrUndefined, ForwardSocket, machineStatusNodeExporterCommand, + ensureMachine, } from './driver' export { profileStore, Profile, ProfileStore, link, Org } from './profile' export { telemetryEmitter, registerEmitter, wireProcessExit, createTelemetryEmitter, machineId } from './telemetry' export { fsTypeFromUrl, Store, VirtualFS, localFsFromUrl, localFs } from './store' -export { localComposeClient, ComposeModel, resolveComposeFiles, getExposedTcpServicePorts, remoteUserModel, NoComposeFilesError, addScriptInjectionsToModel } from './compose' +export { localComposeClient, ComposeModel, resolveComposeFiles, getExposedTcpServicePorts, fetchRemoteUserModel as remoteUserModel, NoComposeFilesError, addScriptInjectionsToServices as addScriptInjectionsToModel } from './compose' export { withSpinner } from './spinner' export { findEnvId, findProjectName, findEnvIdByProjectName, validateEnvId, normalize as normalizeEnvId, EnvId } from './env-id' export { sshKeysStore } from './state' @@ -45,7 +46,8 @@ export { findComposeTunnelAgentUrl, } from './compose-tunnel-agent-client' export * as commands from './commands' -export { wrapWithDockerSocket } from './docker' +export { BuildSpec, ImageRegistry, parseRegistry } from './build' +export { dockerEnvContext } from './docker' export { FlatTunnel, flattenTunnels, @@ -57,7 +59,6 @@ export { } from './tunneling' export { TunnelOpts } from './ssh' export { Spinner } from './spinner' -export { withClosable } from './closable' export { generateBasicAuthCredentials as getUserCredentials, jwtGenerator, jwkThumbprint, jwkThumbprintUri, parseKey } from './credentials' export { ciProviders, detectCiProvider, CiProvider } from './ci-providers' export { paginationIterator } from './pagination' diff --git a/packages/core/src/nulls.ts b/packages/core/src/nulls.ts index 3b666c98..cb395809 100644 --- a/packages/core/src/nulls.ts +++ b/packages/core/src/nulls.ts @@ -44,3 +44,9 @@ export function extractDefined( ? 
defined.then(obj => obj[prop] as unknown as NonNullable) : (o as T)[prop] as NonNullable } + +export const hasProp = < + K extends string | symbol | number +>(prop: K) => < + T extends { [k in K]?: unknown } +>(obj: T): obj is T & { [k in K]-?: NonNullable } => Boolean(obj[prop]) diff --git a/packages/core/src/remote-files.ts b/packages/core/src/remote-files.ts index 2de1ad3c..4dca68da 100644 --- a/packages/core/src/remote-files.ts +++ b/packages/core/src/remote-files.ts @@ -1,5 +1,21 @@ import path from 'path' +import fs from 'fs' export const REMOTE_DIR_BASE = '/var/lib/preevy' export const remoteProjectDir = (projectName: string) => path.posix.join(REMOTE_DIR_BASE, 'projects', projectName) + +export const createCopiedFileInDataDir = ( + { projectLocalDataDir } : { + projectLocalDataDir: string + } +) => async ( + filename: string, + content: string | Buffer +): Promise<{ local: string; remote: string }> => { + const local = path.join(projectLocalDataDir, filename) + const result = { local, remote: filename } + await fs.promises.mkdir(path.dirname(local), { recursive: true }) + await fs.promises.writeFile(local, content, { flag: 'w' }) + return result +} diff --git a/packages/core/src/ssh/client/forward-out.ts b/packages/core/src/ssh/client/forward-out.ts index 89be2541..da0b003f 100644 --- a/packages/core/src/ssh/client/forward-out.ts +++ b/packages/core/src/ssh/client/forward-out.ts @@ -3,9 +3,8 @@ import net, { AddressInfo, ListenOptions } from 'net' import ssh2 from 'ssh2' import { Logger } from '../../log' -export type ForwardOutStreamLocal = { +export type ForwardOutStreamLocal = AsyncDisposable & { localSocket: string | AddressInfo - close: () => Promise } export const forwardOutStreamLocal = ({ ssh, log, listenAddress, remoteSocket, onClose }: { @@ -50,7 +49,7 @@ export const forwardOutStreamLocal = ({ ssh, log, listenAddress, remoteSocket, o reject(new Error(message)) return } - resolve({ localSocket: address, close: async () => { socketServer.close() } }) + resolve({ localSocket: address, [Symbol.asyncDispose]: async () => { socketServer.close() } }) }) .on('error', (err: unknown) => { log.error('socketServer error', err) diff --git a/packages/core/src/ssh/client/index.ts b/packages/core/src/ssh/client/index.ts index f13efb57..73d0ddd8 100644 --- a/packages/core/src/ssh/client/index.ts +++ b/packages/core/src/ssh/client/index.ts @@ -35,7 +35,7 @@ export const connectSshClient = async ( listenAddress: string | number | ListenOptions, remoteSocket: string, ) => forwardOutStreamLocal({ ssh, log, listenAddress, remoteSocket }), - close: () => { ssh.end() }, + [Symbol.dispose]: () => { ssh.end() }, } return self diff --git a/packages/core/src/ssh/client/sftp.ts b/packages/core/src/ssh/client/sftp.ts index 38f06ec5..fdc6bac9 100644 --- a/packages/core/src/ssh/client/sftp.ts +++ b/packages/core/src/ssh/client/sftp.ts @@ -111,7 +111,7 @@ export const sftpClient = ( files.map(f => self.putFile(f, options)), ).then(() => undefined), - close: () => sftp.end(), + [Symbol.dispose]: () => sftp.end(), } return self diff --git a/packages/core/src/telemetry/emitter.ts b/packages/core/src/telemetry/emitter.ts index 5a136260..d1c5b99d 100644 --- a/packages/core/src/telemetry/emitter.ts +++ b/packages/core/src/telemetry/emitter.ts @@ -1,4 +1,5 @@ import os from 'os' +import fs from 'fs' import crypto from 'crypto' import stringify from 'fast-safe-stringify' import fetch from 'node-fetch' @@ -20,23 +21,27 @@ type IdentifyFunction = { (id: string, person?: TelemetryProperties): void } -export const 
telemetryEmitter = async ({ dataDir, version, debug }: { +export const telemetryEmitter = async ({ dataDir, version, debug, filename }: { dataDir: string version: string debug: number + filename?: string }) => { const machineId = await memoizedMachineId(dataDir) let distinctId = machineId const groupIdentities = {} as Record const pendingEvents: TelemetryEvent[] = [] const runId = newRunId() + const file = filename ? fs.createWriteStream(filename, 'utf-8') : undefined // await fs.promises.open(filename, 'a') : undefined let debounceDisabled = false const flushLimit = pLimit(1) const flush = async () => await flushLimit(async () => { if (!pendingEvents.length) { return } - const body = stringify({ batch: pendingEvents.map(serializableEvent) }) + const batch = pendingEvents.map(serializableEvent) + const body = stringify({ batch }) + file?.write(batch.map(event => `${stringify(event)}${os.EOL}`).join('')) pendingEvents.length = 0 const response = await fetch(TELEMETRY_URL, { headers: { 'Content-Type': 'application/json' }, diff --git a/packages/core/src/timing.ts b/packages/core/src/timing.ts new file mode 100644 index 00000000..1c8a62a9 --- /dev/null +++ b/packages/core/src/timing.ts @@ -0,0 +1,6 @@ +export const measureTime = async (f: () => Promise) => { + const startTime = Date.now() + const result = await f() + const elapsedTimeSec = (new Date().getTime() - startTime) / 1000 + return { result, elapsedTimeSec } +} diff --git a/packages/core/src/tunneling/model.ts b/packages/core/src/tunneling/model.ts index 0ffe2001..1575f772 100644 --- a/packages/core/src/tunneling/model.ts +++ b/packages/core/src/tunneling/model.ts @@ -1,6 +1,7 @@ import { TunnelNameResolver } from '@preevy/common' import { generateSshKeyPair } from '../ssh/keypair' -import { ComposeModel, getExposedTcpServicePorts } from '../compose' +import { ComposeModel } from '../compose/model' +import { getExposedTcpServicePorts } from '../compose/client' type port = string type url = string diff --git a/packages/core/tsconfig.json b/packages/core/tsconfig.json index 478b3eac..df196378 100644 --- a/packages/core/tsconfig.json +++ b/packages/core/tsconfig.json @@ -1,7 +1,7 @@ { "compilerOptions": { "declaration": true, - "lib": ["ES2022"], + "lib": ["ES2022", "ESNext.Disposable"], "module": "CommonJS", "target": "ES2019", "moduleResolution": "node", diff --git a/packages/driver-azure/package.json b/packages/driver-azure/package.json index 689a1763..616ef445 100644 --- a/packages/driver-azure/package.json +++ b/packages/driver-azure/package.json @@ -30,8 +30,8 @@ "@types/inquirer-autocomplete-prompt": "^3.0.3", "@types/lodash": "^4.14.192", "@types/node": "18", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-oclif": "^4", @@ -44,7 +44,7 @@ "shx": "^0.3.3", "ts-node": "^10.9.1", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "lint": "eslint . 
--ext .ts,.tsx --cache", diff --git a/packages/driver-gce/package.json b/packages/driver-gce/package.json index f8734bd1..47b32222 100644 --- a/packages/driver-gce/package.json +++ b/packages/driver-gce/package.json @@ -26,8 +26,8 @@ "@types/inquirer-autocomplete-prompt": "^3.0.3", "@types/lodash": "^4.14.192", "@types/node": "18", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-oclif": "^4", @@ -40,7 +40,7 @@ "shx": "^0.3.3", "ts-node": "^10.9.1", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "lint": "eslint . --ext .ts,.tsx --cache", diff --git a/packages/driver-kube-pod/package.json b/packages/driver-kube-pod/package.json index 7944df74..cedb7fef 100644 --- a/packages/driver-kube-pod/package.json +++ b/packages/driver-kube-pod/package.json @@ -28,8 +28,8 @@ "@types/lodash": "^4.14.192", "@types/node": "18", "@types/stream-buffers": "^3.0.4", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-oclif": "^4", @@ -42,7 +42,7 @@ "shx": "^0.3.3", "ts-node": "^10.9.1", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "test": "yarn jest", diff --git a/packages/driver-kube-pod/src/driver/client/port-forward.ts b/packages/driver-kube-pod/src/driver/client/port-forward.ts index 020e5417..9e37cb72 100644 --- a/packages/driver-kube-pod/src/driver/client/port-forward.ts +++ b/packages/driver-kube-pod/src/driver/client/port-forward.ts @@ -3,9 +3,8 @@ import * as k8s from '@kubernetes/client-node' import { promisify } from 'util' import { Logger } from '@preevy/core' -type ForwardSocket = { +type ForwardSocket = AsyncDisposable & { localSocket: string | AddressInfo - close: () => Promise } type Closable = { close: () => void } @@ -46,7 +45,7 @@ const portForward = ( server.listen(listenAddress, () => { resolve({ localSocket: server.address() as string | AddressInfo, - close: () => { + [Symbol.asyncDispose]: () => { sockets.forEach(ws => ws.close()) return closeServer() }, diff --git a/packages/driver-kube-pod/src/driver/driver.ts b/packages/driver-kube-pod/src/driver/driver.ts index a43383e1..0cbeabc3 100644 --- a/packages/driver-kube-pod/src/driver/driver.ts +++ b/packages/driver-kube-pod/src/driver/driver.ts @@ -37,7 +37,7 @@ export const machineConnection = async ( log.debug(`Found pod "${pod.metadata?.name}"`) return ({ - close: async () => undefined, + [Symbol.dispose]: () => undefined, exec: async (command, opts) => { const { code, output } = await client.exec({ @@ -55,10 +55,15 @@ export const machineConnection = async ( dockerSocket: async () => { const host = '0.0.0.0' - const { localSocket, close } = await client.portForward(deployment, 2375, { host, port: 0 }) + + const { + localSocket, + [Symbol.asyncDispose]: dispose, + } = await client.portForward(deployment, 2375, { host, port: 0 }) + return { address: { host, port: (localSocket as AddressInfo).port }, - close, + [Symbol.asyncDispose]: dispose, } }, }) diff --git a/packages/driver-lightsail/package.json b/packages/driver-lightsail/package.json index c3656327..ce339956 100644 --- a/packages/driver-lightsail/package.json +++ b/packages/driver-lightsail/package.json @@ -26,8 +26,8 
@@ "@types/inquirer-autocomplete-prompt": "^3.0.3", "@types/lodash": "^4.14.192", "@types/node": "18", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "eslint-config-airbnb": "^19.0.4", "eslint-config-oclif": "^4", @@ -40,7 +40,7 @@ "shx": "^0.3.3", "ts-node": "^10.9.1", "tslib": "^2.5.0", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "lint": "eslint . --ext .ts,.tsx --cache", diff --git a/packages/plugin-github/README.md b/packages/plugin-github/README.md index 98f94642..b68b447c 100644 --- a/packages/plugin-github/README.md +++ b/packages/plugin-github/README.md @@ -8,13 +8,13 @@ This plugin is bundled with Preevy and enabled by default. To disable it, see [b ![Demo comment](./demo.png) -### Automatic comment at `up` and `down` +### Automatic PR comment at `up` and `down` Comment generation is done as part of the `up` and `down` core commands. -If a GitHub context is detected (e.g, when running in a GitHub actions job), it will post the comment automatically. +Preevy will post the comment if a GitHub PR and a GitHub token are detected in the context (e.g, when running in a GitHub Action or other [supported CI provider](#configuration-from-the-ci-provider-context)) or specified explicitly. See the [Configuration section](#configuration) for details. -### Manual comment using the `github` commands +### Manual PR comment using the `github` commands This plugin adds the following commands: @@ -24,6 +24,17 @@ This plugin adds the following commands: Run `preevy github pr comment --help` for details. +## GitHub Docker build cache + +Specify `--github-add-build-cache` at the `up` command to add [GitHub cache](https://docs.docker.com/build/ci/github-actions/cache/#github-cache) to your build directives. + +This will add the following directives to all services with a `build` section: + +```yaml + cache_to: type=gha,scope=/,mode=max + cache_from: type=gha,scope=/ +``` + ## Configuration At runtime, the plugin will attempt to detect the configuration it needs from environment variables and the git context. Options can be overridden using CLI flags and the Docker Compose file. @@ -115,7 +126,7 @@ The following flags can be specified at the Preevy CLI: -### Comment template +### PR comment template The generated PR comment can be customized by specifying a template in your Docker Compose file, or in a separate file (see above). The template is rendered by [`nunjucks`](https://mozilla.github.io/nunjucks/templating.html) and receives a context containing a `urls` property which is one of the following: diff --git a/packages/plugin-github/package.json b/packages/plugin-github/package.json index 819b0914..ae80c033 100644 --- a/packages/plugin-github/package.json +++ b/packages/plugin-github/package.json @@ -17,11 +17,11 @@ }, "devDependencies": { "@types/nunjucks": "^3.2.2", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "shx": "^0.3.3", - "typescript": "^5.0.4" + "typescript": "^5.2.2" }, "scripts": { "lint": "eslint . 
--ext .ts,.tsx --cache", diff --git a/packages/plugin-github/src/commands/github/pr/comment.ts b/packages/plugin-github/src/commands/github/pr/comment.ts index 3662815d..21f3f283 100644 --- a/packages/plugin-github/src/commands/github/pr/comment.ts +++ b/packages/plugin-github/src/commands/github/pr/comment.ts @@ -23,7 +23,7 @@ class CommentGithubPr extends BaseGithubPrCommand { '--json', ]) as FlatTunnel[] - const { flags } = await this.parse(CommentGithubPr) + const { flags } = this const config = await this.loadGithubPullRequestCommentConfig(flags) await upsertPreevyComment({ diff --git a/packages/plugin-github/src/commands/github/pr/uncomment.ts b/packages/plugin-github/src/commands/github/pr/uncomment.ts index 454fbcfb..877bcfc9 100644 --- a/packages/plugin-github/src/commands/github/pr/uncomment.ts +++ b/packages/plugin-github/src/commands/github/pr/uncomment.ts @@ -19,7 +19,7 @@ class UnCommentGithubPr extends BaseGithubPrCommand { } async run() { - const { flags } = await this.parse(UnCommentGithubPr) + const { flags } = this const config = await this.loadGithubPullRequestCommentConfig(flags) await upsertPreevyComment({ diff --git a/packages/plugin-github/src/flags.ts b/packages/plugin-github/src/flags.ts index e80cc607..823da760 100644 --- a/packages/plugin-github/src/flags.ts +++ b/packages/plugin-github/src/flags.ts @@ -48,6 +48,14 @@ export const commentTemplateFlagDef = { }), } as const +const buildFlagDef = { + 'add-build-cache': Flags.boolean({ + description: 'Add github cache to the build', + required: false, + helpGroup: HELP_GROUP, + }), +} as const + const flagPrefix = 'github' as const type Prefixed = { @@ -60,7 +68,12 @@ type Unprefixed = [K in keyof T as ExtractPrefix]: T[K] } -const upDownFlagsDefSource = { ...flagsDef, ...pullRequestFlagsDef, ...commentTemplateFlagDef } as const +const upDownFlagsDefSource = { + ...flagsDef, + ...pullRequestFlagsDef, + ...commentTemplateFlagDef, + ...buildFlagDef, +} as const export const upDownFlagsDef = { ...mapKeys(upDownFlagsDefSource, (_v, k) => `${flagPrefix}-${k}`) as Prefixed, @@ -73,7 +86,7 @@ export const upDownFlagsDef = { })(), } as const -export const parseUpDownFlagsDef = (argv: string[]) => mapKeys( - parseFlags(upDownFlagsDef, argv), +export const parseUpDownFlagsDef = async (argv: string[]) => mapKeys( + await parseFlags(upDownFlagsDef, argv), (_v, k) => k.replace(/^github-/, ''), ) as Unprefixed> diff --git a/packages/plugin-github/src/hooks.ts b/packages/plugin-github/src/hooks.ts index 580e5108..c9b9ef61 100644 --- a/packages/plugin-github/src/hooks.ts +++ b/packages/plugin-github/src/hooks.ts @@ -1,13 +1,13 @@ -import { HookFunc } from '@preevy/cli-common' +import { HookFunc, HookName } from '@preevy/cli-common' import { Octokit } from 'octokit' import { Config as OclifConfig } from '@oclif/core/lib/interfaces' import { Logger, detectCiProvider } from '@preevy/core' -import { memoize } from 'lodash' +import { mapValues, memoize } from 'lodash' import { upsertPreevyComment, Content } from './lib/github-comment' import { parseUpDownFlagsDef } from './flags' import { PluginConfig, loadGithubPullRequestCommentConfig } from './config' -const SCOPED_ENV_VAR = 'GITHUB_PR_COMMENT_ENABLED' +const COMMENT_ENABLED_ENV_KEY = 'GITHUB_PR_COMMENT_ENABLED' const upsertPrCommentHook = async ({ argv, pluginConfig, oclifConfig, log, envId, content }: { argv: string[] @@ -17,12 +17,12 @@ const upsertPrCommentHook = async ({ argv, pluginConfig, oclifConfig, log, envId envId: string content: Content }) => { - if 
(oclifConfig.scopedEnvVar(SCOPED_ENV_VAR) && !oclifConfig.scopedEnvVarTrue(SCOPED_ENV_VAR)) { - log.debug(`Skipping due to env var ${oclifConfig.scopedEnvVarKey(SCOPED_ENV_VAR)}=${oclifConfig.scopedEnvVar(SCOPED_ENV_VAR)}`) + if (oclifConfig.scopedEnvVar(COMMENT_ENABLED_ENV_KEY) && !oclifConfig.scopedEnvVarTrue(COMMENT_ENABLED_ENV_KEY)) { + log.debug(`Skipping due to env var ${oclifConfig.scopedEnvVarKey(COMMENT_ENABLED_ENV_KEY)}=${oclifConfig.scopedEnvVar(COMMENT_ENABLED_ENV_KEY)}`) return } - const flags = parseUpDownFlagsDef(argv) + const flags = await parseUpDownFlagsDef(argv) if (flags['pr-comment-enabled'] === 'no') { log.debug('Skipping due to flag') @@ -53,18 +53,53 @@ const upsertPrCommentHook = async ({ argv, pluginConfig, oclifConfig, log, envId }) } -export const envCreated = ({ argv, pluginConfig, oclifConfig }: { +type HookFactory = ({ argv, pluginConfig, oclifConfig }: { argv: string[] pluginConfig: PluginConfig oclifConfig: OclifConfig -}): HookFunc<'envCreated'> => async ({ log }, { envId, urls }) => { +}) => Promise> + +export const envCreated: HookFactory<'envCreated'> = async ( + { argv, pluginConfig, oclifConfig }, +) => async ({ log }, { envId, urls }) => { await upsertPrCommentHook({ argv, pluginConfig, oclifConfig, log, envId, content: { urls } }) } -export const envDeleted = ({ argv, pluginConfig, oclifConfig }: { - argv: string[] - pluginConfig: PluginConfig - oclifConfig: OclifConfig -}): HookFunc<'envDeleted'> => async ({ log }, { envId }) => { +export const envDeleted: HookFactory<'envDeleted'> = async ( + { argv, pluginConfig, oclifConfig }, +) => async ({ log }, { envId }) => { await upsertPrCommentHook({ argv, pluginConfig, oclifConfig, log, envId, content: 'deleted' }) } + +export const userModelFilter: HookFactory<'userModelFilter'> = async ({ argv }) => { + const { 'add-build-cache': addBuildCache } = await parseUpDownFlagsDef(argv) + if (!addBuildCache) { + return async ({ userModel }) => userModel + } + + return async ({ log, userModel }) => { + log.debug('Adding GHA build cache to user model') + + return { + ...userModel, + services: { + ...mapValues(userModel.services ?? {}, (({ build, ...rest }, serviceName) => { + if (!build) { + return rest + } + + const scope = [userModel.name, serviceName].join('/') + + return ({ + ...rest, + build: { + ...build, + cache_from: (build.cache_from ?? []).concat(`type=gha,scope=${scope}`), + cache_to: (build.cache_to ?? 
[]).concat(`type=gha,scope=${scope},mode=max`),
+            },
+          })
+        })),
+      },
+    }
+  }
+}
diff --git a/packages/plugin-github/src/index.ts b/packages/plugin-github/src/index.ts
index c92271c0..ca7b73a7 100644
--- a/packages/plugin-github/src/index.ts
+++ b/packages/plugin-github/src/index.ts
@@ -1,5 +1,5 @@
 import { Plugin } from '@preevy/cli-common'
-import { envCreated, envDeleted } from './hooks'
+import { envCreated, envDeleted, userModelFilter } from './hooks'
 import { PluginConfig } from './config'
 import { upDownFlagsDef } from './flags'
 import CommentGithubPr from './commands/github/pr/comment'
@@ -17,8 +17,9 @@ export const preevyPlugin: Plugin = {
     description: 'GitHub integration',
   }],
   hooks: {
-    envCreated: envCreated(context),
-    envDeleted: envDeleted(context),
+    userModelFilter: await userModelFilter(context),
+    envCreated: await envCreated(context),
+    envDeleted: await envDeleted(context),
   },
 }),
 }
diff --git a/site/docs/intro/under-the-hood.md b/site/docs/intro/under-the-hood.md
index 91ac179a..d5fcd655 100644
--- a/site/docs/intro/under-the-hood.md
+++ b/site/docs/intro/under-the-hood.md
@@ -7,16 +7,24 @@ title: Under the hood
 
 When provisioning a new environment using the [`up`](/cli-reference#preevy-up-service) command, Preevy does the following:
 
-- Reads for [default configurations](#profile-configuration) and relevant keys from the current profile store.
-- Calculates environment name based on the current git branch (or uses the `--id` flag.)
-- Uses the local Cloud provider configuration to provision a new VM.
-- Reads SSH keypair from profile to access the VM, if necessary, generate a new one.
-- Connects to the VM using SSH and sets up Docker.
-- Reads the compose file and copies local volume mounts to the VM.
-- Augments the compose deployment with a helper service, `tunnel-agent`, responsible for connecting to the [tunnel server](/tunnel-server).
-- Runs the application using [docker-compose](https://docs.docker.com/compose/) with `--build` while using the local build context.
-- The `tunnel-agent` is inspecting the network configuration of all deployed services and create a tunnel for each service.
-- Fetch the urls from tunnel-agent and output them to the end user.
+- Load the configuration:
+  - Read the specified Compose file(s)
+  - Read the tunneling key and default flags from the profile.
+  - Calculate the environment ID based on the current git branch (or the `--id` flag, if specified)
+  - Connect to the Tunnel Server using the tunneling key to pre-generate the public URLs in env vars
+- Make sure a machine is provisioned:
+  - Query the configured cloud provider for an existing machine
+  - If a machine doesn't exist yet, a new one is provisioned and a Docker server is set up in it
+- Set up an SSH tunnel to the Docker server on the provisioned machine
+- Build the Compose project:
+  - Extract the build information from the specified Compose file(s) and combine it with the specified build options to generate an interim build Compose file (see the example after this list).
+  - Run `docker buildx bake` with the generated build Compose file.
+  - The resulting images are either loaded onto the provisioned machine or written to an image registry.
+- Deploy the Compose services to the machine's Docker server using the `docker compose up` command:
+  - Local volume mounts are copied to the remote machine first
+  - The original Compose project is augmented with a helper service, `preevy_proxy`, responsible for connecting to the [Tunnel Server](/tunnel-server).
+- The `preevy_proxy` service creates a tunnel for each service.
+- Fetch the URLs from the Tunnel Server and output them.
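+
+For illustration only: assuming a Compose project named `my-project`, a single `frontend` service, the `--registry my-registry` flag and git hash `12abcdef` (all of these are example values), the interim build Compose file generated in the build step might look roughly like this:
+
+```yaml
+services:
+  frontend:
+    build:
+      context: .
+      tags:
+        - my-registry/preevy-my-project-frontend:latest
+        - my-registry/preevy-my-project-frontend:12abcdef
+      cache_from:
+        - my-registry/preevy-my-project-frontend:latest
+      cache_to:
+        - type=registry,ref=my-registry/preevy-my-project-frontend:latest,mode=max
+```
+
+See the [faster builds recipe](/recipes/faster-build) for the full details and build-related flags.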
 
 ## Profile configuration
 
diff --git a/site/docs/recipes/faster-build.md b/site/docs/recipes/faster-build.md
new file mode 100644
index 00000000..83f36755
--- /dev/null
+++ b/site/docs/recipes/faster-build.md
@@ -0,0 +1,368 @@
+---
+title: Faster builds
+sidebar_position: 2
+---
+
+## tl;dr
+
+To build faster in your CI:
+
+- Create and use BuildKit builders to offload resource-intensive builds. Specify the `--builder` flag to use the created builders.
+- Specify the `--registry` flag to automatically add caching to your Compose build model.
+- For fine-tuned optimization, create a Compose file for your CI to override the build parameters.
+
+## Problem
+
+By default, Preevy runs the build on the Docker server of the remote machine that Preevy provisions. Builds are often more resource intensive than the environment runtime - but those resources are only required when the environment is created or updated.
+
+In addition, builds often run with no cache (especially when the machine was just created, e.g, a new PR on a new branch), taking longer than they should. Configuring a remote cache manually is possible, but requires some work.
+
+## Solutions
+
+1. Offloading the build step to a specialized server can reduce the memory, disk and CPU requirements of the machines provisioned for environments. It can also help speed up the build step so Preview Environments can be created faster in your CI.
+2. Reusing cached layers from previous builds can accelerate the build by skipping expensive build steps (e.g, `yarn install`). When creating Preview Environments in CI, cached image layers from the base branch, or from previous builds of the same branch, can be reused.
+
+Starting with version `0.0.57`, Preevy runs a separate build step using the [`docker buildx bake`](https://docs.docker.com/engine/reference/commandline/buildx_bake/) command, before running the deploy step using `docker compose up`. Preevy can customize the build step to make use of BuildKit builders, and automatically configure caching for the build. These two features work together to speed up the creation of Preview Environments in your CI.
+
+## Part 1: Offload the build
+
+Preevy can use [BuildKit builders](https://docs.docker.com/build/builders/) to offload the build step out of the environment machine.
+
+Specify a builder using the `--builder` flag at the `preevy up` command. If not specified, the [default builder](https://docs.docker.com/build/builders/#selected-builder) will be used.
+
+Out-of-the-box, Docker's default builder uses the [Docker driver](https://docs.docker.com/build/drivers/docker/). This driver uses the connected Docker server to build. Preevy sets the Docker server to the provisioned machine's Docker server (using the `DOCKER_HOST` environment variable), so the build runs there.
+
+To run the build on the local machine (where the `preevy` CLI runs), or on a remote server, configure a builder with a different driver. The [`docker buildx create` command](https://docs.docker.com/engine/reference/commandline/buildx_create) can be used to create a builder.
+
+### Choosing a builder driver
+
+- The [Docker container](https://docs.docker.com/build/drivers/docker-container/) driver is the simplest option - it will run the build on the Docker server of the local machine.
+- Use the [Kubernetes driver](https://docs.docker.com/build/drivers/kubernetes/) to run the build on a Kubernetes cluster. Kubernetes can be set up to allocate powerful servers to the build.
+- Use the [Remote driver](https://docs.docker.com/build/drivers/remote/) to connect to a remote BuildKit daemon.
+- Use a 3rd party service like [Depot](https://depot.dev/docs/guides/docker-build) to host the build. Preevy can work with any builder that runs via `docker buildx build`.
+
+### Setting up a builder in GitHub Actions
+
+For GitHub Actions, the [`setup-buildx-action`](https://github.com/marketplace/actions/docker-setup-buildx) can be used to simplify builder management.
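+
+For example, a minimal (and purely illustrative) workflow fragment could create a builder on the CI machine and pass its name to `preevy up`; the action versions and step layout below are assumptions, adapt them to your pipeline:
+
+```yaml
+# .github/workflows/preview.yaml (fragment)
+steps:
+  - uses: actions/checkout@v4
+  # Creates a BuildKit builder using the docker-container driver on the CI machine
+  - uses: docker/setup-buildx-action@v3
+    id: buildx
+  # Assumes the Preevy CLI is installed and a profile is configured in earlier steps
+  - run: preevy up --builder "${{ steps.buildx.outputs.name }}"
+```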
+
+## Part 2: Automatically configure cache
+
+Preevy can automatically add the `cache_to` and `cache_from` directives to the [build section of the Compose file](https://docs.docker.com/compose/compose-file/build/) to specify a layer cache to be used when building your images.
+
+To share the cache across different CI runs, it needs to be stored on a remote backend - not on the build machine, which is usually ephemeral.
+
+Note that exporting a cache to a remote backend adds some time to the build step.
+
+### Generated image refs
+
+To allow reusing the cached image layers, stable IDs are required for each image - the image refs. Preevy generates image refs for each service comprising the Compose project name (usually the directory name), the service name and the current git hash. It will then use the generated image refs to add `cache_from` and `cache_to` directives for each service build.
+
+At the end of the build step, images will be pushed to the registry. Preevy will then run the provisioning step (`docker compose up`) with a modified Compose file which has the built image refs for each service. The result is a build which automatically uses the specified registry as a cache.
+
+### Using an image registry as a cache backend
+
+An [image registry](https://docs.docker.com/build/cache/backends/) can serve as a cache backend.
+
+When the `--registry` flag is specified, Preevy can automatically add cache directives which use this registry to the Compose project.
+
+#### Example
+
+With this `docker-compose.yaml`:
+
+```yaml
+name: my-project # if not specified, the Compose file directory name is used
+services:
+  frontend:
+    build: .
+```
+
+And using the git hash `12abcdef`, the command:
+
+```bash
+preevy up --registry my-registry --builder my-builder
+```
+
+Will result in the following interim build Compose file:
+
+```yaml
+services:
+  frontend:
+    build:
+      context: .
+      tags:
+        - my-registry/preevy-my-project-frontend:latest
+        - my-registry/preevy-my-project-frontend:12abcdef
+      cache_to:
+        - type=registry,ref=my-registry/preevy-my-project-frontend:latest,mode=max,oci-mediatypes=true,image-manifest=true
+      cache_from:
+        - my-registry/preevy-my-project-frontend:latest
+        - my-registry/preevy-my-project-frontend:12abcdef
+```
+
+At the end of the build step, the tagged image refs will be pushed to the `my-registry` registry.
+
+The following Compose file will be deployed to the machine:
+
+```yaml
+services:
+  frontend:
+    image: my-registry/preevy-my-project-frontend:12abcdef
+```
+
+#### AWS ECR dance
+
+Using Amazon [Elastic Container Registry](https://aws.amazon.com/ecr/) as your image registry requires creating a "repository" before pushing an image. When creating image refs for ECR, Preevy uses a slightly different scheme, because image names (the part after the slash) cannot be dynamic - so the dynamic part is moved to the tag.
+ +Example, with the same project and registry above: + +- Non-ECR image ref: `my-registry/my-project-frontend:12abcdef` +- ECR image ref for the existing repository `my-repo`: `my-registry/my-repo:my-project-frontend-12abcdef` + +Preevy uses the ECR image ref scheme automatically when it detects an ECR registry name. This behavior can be enabled manually by specifying `--registry-single-name=`. Example: `--registry my-registry --registry-single-name=my-repo`. Auto-detection of ECR-style registries can be disabled by specifying `--no-registry-single-name`. + +#### Choosing a registry + +Several options exist: + +* Creating a registry on the same cloud provider used by Preevy to provision the environment machines is usually inexpensive: [ECR](https://aws.amazon.com/ecr/) for AWS, [GAR](https://cloud.google.com/artifact-registry/) for Google Cloud, [ACR](https://azure.microsoft.com/en-us/products/container-registry/) for Azure. +* Creating a registry on the CI provider, e.g, [GHR](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) on GitHub actions. +* [Docker Hub](https://www.docker.com/products/docker-hub/) +* [ttl.sh](https://ttl.sh/) is a free, ephemeral and anonymous image registry. +* Other 3rd party registries exist with some free tiers: JFrog, Treescale, Canister, GitLab + +#### Careful when using a builder without a registry + +Without a registry, Preevy will add the `--load` flag to the `docker buildx bake` command to load built images to the environment's Docker server. If the builder does not reside on the same Docker server, built images will be transferred over the network. So, when using a builder other than the default Docker builder, it is advised to also use a registry. + +### Using GitHub Actions cache + +[GitHub Actions](https://docs.docker.com/build/cache/backends/gha/) can also be used as a cache backend. The [Preevy GitHub Plugin](https://github.com/livecycle/preevy/tree/main/packages/plugin-github#readme) can add suitable cache directives to your services. Specify the [`--github-add-build-cache` flag](https://github.com/livecycle/preevy/tree/main/packages/plugin-github#github-docker-build-cache) to enable this feature. + +See the [relevant section in the Docker docs](https://docs.docker.com/build/cache/backends/gha/#authentication) on how to enable authentication of the Docker CLI to the GitHub cache in your CI. + +### Using other cache backends + +More backends are described in the [Docker docs](https://docs.docker.com/build/cache/backends/). + +## Manual optimization + +If you already have an [efficient build pipeline](https://docs.docker.com/build/cache/) which creates images for the current commit, you can skip Preevy's build step entirely and provision an environment with existing images. + +Specify `--no-build` to skip the build step. Preevy will run `docker compose up --no-build` with the given Compose file, which needs to have an `image` property for each service. + +## Complete list of build-related flags + +* `--no-build`: Skip the build step entirely +* `--registry=`: Registry to use. Implies creating and pushing an image ref for each service at the build. Default: Do not use a registry and load built images to the environment's Docker server +* `--builder=`: Builder to use. Defaults to the current buildx builder. +* `--registry-single-name=`: Use single name (ECR-style repository) in image refs. 
+* `--no-registry-cache`: Do not add `cache_from` and `cache_to` directives to the build Compose file
+* `--no-cache`: Do not use cache when building the images
+* `--github-add-build-cache`: Add GHA cache directives to all services
+
+## Real world performance: A case study
+
+Optimizing the CI build involves using multiple techniques while balancing their benefits and constraints. It might be useful to test and measure some combinations to make sure your CI setup works best for your specific use case.
+
+We tested a [simple app](https://github.com/livecycle/preevy-gha-gce-demo) comprising two built images (in addition to an external db image). In each run, Preevy was used to provision a Preview Environment in GitHub Actions on Google Cloud.
+
+#### Environment machine sizes
+
+Two machine sizes were tested:
+
+- `e2-small`: 2GB of memory, 0.5-2 vCPUs
+- `e2-medium`: 4GB of memory, 1-2 vCPUs
+
+The small machine is good enough for running the app and costs exactly half of the bigger machine.
+
+#### Build flag variations
+
+A few variations of the builder, registry and cache were tested:
+
+|Builder |Registry|Cache|`preevy up` flags|
+|:----------------|:--------|:-----|:-----------|
+|Environment machine|none |none |None - this is the default build mode|
+|CI machine|none |none|`--builder=X`|
+|CI machine|none |GHA|`--builder=X --github-add-build-cache`|
+|CI machine|GHCR |none|`--builder=X --registry=ghcr.io`|
+|CI machine|GHCR |GHA|`--builder=X --registry=ghcr.io --github-add-build-cache`|
+|CI machine|GAR |none|`--builder=X --registry=my-docker.pkg.dev`|
+|CI machine|GAR |GHA|`--builder=X --registry=my-docker.pkg.dev --github-add-build-cache`|
+
+##### Legend:
+
+- GHA: [GitHub Actions cache](https://github.com/actions/cache)
+- GHCR: [GitHub Container Registry](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry)
+- GAR: [Google Artifact Registry](https://cloud.google.com/artifact-registry/)
+
+#### CI scenarios
+
+A few scenarios were tested to simulate CI runs in different stages of the development process:
+
+|Scenario|Description|Code changes|Environment machine exists?|Registry and cache populated?|
+|:--|:--------|:------------|:---------------------------|:-----------------------------|
+|**A**|From scratch - not likely in CI| |No|No|
+|**B**|Commit to existing PR, no code changes| |Yes|Yes|
+|**C**|Commit to existing PR, code changes|A JSX file|Yes|Yes|
+|**D**|Commit to existing PR, dep changes|`package.json`|Yes|Yes|
+|**E**|First commit to new PR| |No|Yes|
+
+#### Measurements
+
+We measured the following steps in the build job:
+
+- Setup: [copying files to/from the cache](https://docs.docker.com/build/ci/github-actions/cache/#cache-mounts), if a cache was used
+- Build: the `docker buildx bake` command
+- Deploy: the `docker compose up` command
+
+VM preparation time was not measured.
+
+### Results summary
+
+Offloading the build to the stronger CI machine can reduce the cost of running preview environments significantly - in this sample case by nearly 50%!
+
+- For the small environment machine, build was decidedly faster when done on the CI machine.
+- For the bigger environment machine, it was faster to build a new PR on the CI machine, and especially fast with the GitHub registry (which has a good network connection to the CI machine).
+
+### Discussion
+
+Network transfers are a major cause of long builds. Both our GAR and the Environment VMs were in the same region, which is geographically remote from [GitHub's hosted CI runners](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners).
+
+Building on the Environment machine is advantageous: it does not require cache import/export or registry download/upload, and makes full use of a local cache.
+
+The performance benefits of using a registry and/or cache can be seen when building cross-branch.
+
+### Full results
+
+#### Scenario A: from scratch
+
+This is an unlikely scenario in CI, but it serves as a control group for the others.
+
+##### `e2-small` machine
+
+|registry|builder|cache|setup time|build time|deploy time|total time|
+|:-----|:------|:-----|:-----|:-----|:----|:-----|
+| | docker-container | ghcache | 18 | 116 | 34 | 169 |
+| gar | docker-container | | 7 | 94 | 72 | 172 |
+| | docker-container | | 3 | 142 | 37 | 182 |
+| gar | docker-container | ghcache | 13 | 105 | 66 | 183 |
+| | | | 0 | 128 | 59 | 187 |
+| ghcr | docker-container | | 9 | 53 | 1091 | 1152 |
+| ghcr | docker-container | ghcache | 14 | 53 | 1101 | 1168 |
+
+##### `e2-medium` machine
+
+|registry|builder|cache|setup time|build time|deploy time|total time|
+|:-----|:------|:-----|:-----|:-----|:----|:-----|
+| | | | 0 | 69 | 26 | 95 |
+| GHCR | CI machine | | 2 | 47 | 46 | 95 |
+| GHCR | CI machine | GHA | 11 | 50 | 51 | 113 |
+| GAR | CI machine | | 10 | 76 | 45 | 130 |
+| | CI machine | | 3 | 115 | 30 | 148 |
+| | CI machine | GHA | 7 | 120 | 30 | 157 |
+| GAR | CI machine | GHA | 11 | 92 | 56 | 159 |
+
+#### Scenario B: commit to existing PR, no code changes
+
+##### `e2-small` machine
+
+|registry|builder|cache|setup time|build time|deploy time|total time|
+|:-----|:------|:-----|:-----|:-----|:----|:-----|
+| | | | 0 | 9 | 6 | 15 |
+| ghcr | docker-container | ghcache | 9 | 11 | 5 | 24 |
+| ghcr | docker-container | | 10 | 8 | 5 | 23 |
+| gar | docker-container | | 5 | 34 | 5 | 44 |
+| | docker-container | ghcache | 9 | 51 | 5 | 65 |
+| gar | docker-container | ghcache | 13 | 58 | 5 | 76 |
+| | docker-container | | 2 | 101 | 29 | 132 |
+
+##### `e2-medium` machine
+
+|registry|builder|cache|setup time|build time|deploy time|total time|
+|:-----|:------|:-----|:-----|:-----|:----|:-----|
+| | | | 0 | 8 | 4 | 13 |
+| GHCR | CI machine | | 3 | 7 | 5 | 15 |
+| GHCR | CI machine | GHA | 13 | 7 | 5 | 24 |
+| GAR | CI machine | | 10 | 36 | 5 | 51 |
+| GAR | CI machine | GHA | 13 | 34 | 5 | 52 |
+| | CI machine | | 2 | 96 | 29 | 127 |
+| | CI machine | GHA | 15 | 108 | 29 | 152 |
+
+#### Scenario C: commit to existing PR with code changes
+
+##### `e2-small` machine
+
+|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| | | | 0 | 9 | 27 | 36 +| ghcr | docker-container | | 2 | 24 | 31 | 57 +| ghcr | docker-container | ghcache | 9 | 30 | 52 | 91 +| gar | docker-container | | 12 | 53 | 30 | 95 +| gar | docker-container | ghcache | 12 | 59 | 32 | 102 +| | docker-container | ghcache | 9 | 78 | 28 | 115 +| | docker-container | | 6 | 112 | 30 | 147 + +##### `e2-medium` machine + +|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| | | | 0| 9| 26| 35 +| GHCR| CI machine| | 3| 28| 34| 66 +| GAR| CI machine| | 4| 67| 28| 99 +| GAR| CI machine| GHA| 12| 63| 29| 104 +| GHCR| CI machine| GHA| 10| 47| 56| 113 +| | CI machine| GHA| 14| 91| 26| 132 +| | CI machine| | 3| 110| 30| 143 + +#### Scenario D: commit to existing PR with `package.json` changes + +##### `e2-small` machine + +|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| ghcr | docker-container | ghcache | 10 | 43 | 52 | 105 +| | docker-container | | 2 | 101 | 28 | 131 +| | docker-container | ghcache | 9 | 97 | 28 | 134 +| gar | docker-container | ghcache | 17 | 78 | 48 | 143 +| gar | docker-container | | 6 | 96 | 48 | 151 +| | | | 0 | 123 | 30 | 153 + +##### `e2-medium` machine + +|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| | | | 0|29| 27| 56 +| GHCR| CI machine| GHA| 9| 49| 48| 106 +| GHCR| CI machine| | 2| 64| 51| 116 +| | CI machine| | 2| 101| 30| 132 +| GAR| CI machine| | 7| 100| 47| 155 +| | CI machine | GHA| 12| 121| 31| 163 +| GAR| CI machine| GHA| 16| 104| 47| 167 + +#### Scenario E: first commit to new PR (machine does not exist) + +##### `e2-small` machine + +|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| | docker-container | | 3 | 117 | 37 | 157 +| gar | docker-container | | 6 | 88 | 69 | 164 +| gar | docker-container | ghcache | 17 | 91 | 66 | 174 + | | | | 0 | 153 | 56 | 210 +| ghcr | docker-container | | 7 | 46 | 1066 | 1119 +| ghcr | docker-container | ghcache | 13 | 41 | 1082 | 1136 + +##### `e2-medium` machine + +|registry|builder|cache|setup time|build time|deploy time|total time| +|:-----|:------|:-----|:-----|:-----|:----|:-----| +| GHCR| CI machine| | 8| 8| 62| 78 +| GAR| CI machine| | 4|28| 59| 91 +| GHCR| CI machine| GHA| 21|16| 57| 94 +| | | | 0| 71| 26| 96 +| GAR| CI machine| GHA| 17| 30| 57| 104 +| | CI machine| GHA| 11| 82| 26| 119 +| | CI machine| | 7| 94| 27| 128 diff --git a/site/docs/recipes/service-discovery.md b/site/docs/recipes/service-discovery.md index e97f10a7..6a352b43 100644 --- a/site/docs/recipes/service-discovery.md +++ b/site/docs/recipes/service-discovery.md @@ -1,6 +1,6 @@ --- title: "Service discovery using environment variables" -sidebar_position: 2 +sidebar_position: 3 --- ## tl;dr diff --git a/site/docs/roadmap.md b/site/docs/roadmap.md index 4e8ee194..727fd308 100644 --- a/site/docs/roadmap.md +++ b/site/docs/roadmap.md @@ -13,10 +13,6 @@ sidebar_position: 7 - [x] [Kubernetes](drivers/kube-pod.md) - [ ] [fly.io](https://fly.io/) -## Build customization - -We plan to provide customization to the build process of the environment. 
Instead building the containers on the same machine they run on, we can save computing power and time by building on dedicated build machines, which are potentially faster than the runtime machine, or build on the local dev machine. - ## Plugins A plugin system exists for Preevy providing a way to expand the preview environment with more functionality. diff --git a/tunnel-server/package.json b/tunnel-server/package.json index 18696123..25977f95 100644 --- a/tunnel-server/package.json +++ b/tunnel-server/package.json @@ -38,13 +38,13 @@ "@types/node": "18", "@types/node-fetch": "^2.6.4", "@types/ssh2": "^1.11.8", - "@typescript-eslint/eslint-plugin": "6.7.4", - "@typescript-eslint/parser": "6.7.4", + "@typescript-eslint/eslint-plugin": "6.10.0", + "@typescript-eslint/parser": "6.10.0", "eslint": "^8.36.0", "jest": "29.7.0", "nodemon": "^2.0.20", "ts-jest": "29.1.1", - "typescript": "^5.0.4", + "typescript": "^5.2.2", "wait-for-expect": "^3.0.2" }, "scripts": { diff --git a/yarn.lock b/yarn.lock index 52646262..e48128be 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4100,16 +4100,16 @@ dependencies: "@types/yargs-parser" "*" -"@typescript-eslint/eslint-plugin@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.7.4.tgz#057338df21b6062c2f2fc5999fbea8af9973ac6d" - integrity sha512-DAbgDXwtX+pDkAHwiGhqP3zWUGpW49B7eqmgpPtg+BKJXwdct79ut9+ifqOFPJGClGKSHXn2PTBatCnldJRUoA== +"@typescript-eslint/eslint-plugin@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.10.0.tgz#cfe2bd34e26d2289212946b96ab19dcad64b661a" + integrity sha512-uoLj4g2OTL8rfUQVx2AFO1hp/zja1wABJq77P6IclQs6I/m9GLrm7jCdgzZkvWdDCQf1uEvoa8s8CupsgWQgVg== dependencies: "@eslint-community/regexpp" "^4.5.1" - "@typescript-eslint/scope-manager" "6.7.4" - "@typescript-eslint/type-utils" "6.7.4" - "@typescript-eslint/utils" "6.7.4" - "@typescript-eslint/visitor-keys" "6.7.4" + "@typescript-eslint/scope-manager" "6.10.0" + "@typescript-eslint/type-utils" "6.10.0" + "@typescript-eslint/utils" "6.10.0" + "@typescript-eslint/visitor-keys" "6.10.0" debug "^4.3.4" graphemer "^1.4.0" ignore "^5.2.4" @@ -4143,15 +4143,15 @@ eslint-scope "^5.1.1" eslint-utils "^3.0.0" -"@typescript-eslint/parser@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.7.4.tgz#23d1dd4fe5d295c7fa2ab651f5406cd9ad0bd435" - integrity sha512-I5zVZFY+cw4IMZUeNCU7Sh2PO5O57F7Lr0uyhgCJmhN/BuTlnc55KxPonR4+EM3GBdfiCyGZye6DgMjtubQkmA== +"@typescript-eslint/parser@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.10.0.tgz#578af79ae7273193b0b6b61a742a2bc8e02f875a" + integrity sha512-+sZwIj+s+io9ozSxIWbNB5873OSdfeBEH/FR0re14WLI6BaKuSOnnwCJ2foUiu8uXf4dRp1UqHP0vrZ1zXGrog== dependencies: - "@typescript-eslint/scope-manager" "6.7.4" - "@typescript-eslint/types" "6.7.4" - "@typescript-eslint/typescript-estree" "6.7.4" - "@typescript-eslint/visitor-keys" "6.7.4" + "@typescript-eslint/scope-manager" "6.10.0" + "@typescript-eslint/types" "6.10.0" + "@typescript-eslint/typescript-estree" "6.10.0" + "@typescript-eslint/visitor-keys" "6.10.0" debug "^4.3.4" "@typescript-eslint/parser@^4.31.2": @@ -4180,6 +4180,14 @@ "@typescript-eslint/types" "5.54.0" "@typescript-eslint/visitor-keys" "5.54.0" +"@typescript-eslint/scope-manager@6.10.0": + version "6.10.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.10.0.tgz#b0276118b13d16f72809e3cecc86a72c93708540" + integrity sha512-TN/plV7dzqqC2iPNf1KrxozDgZs53Gfgg5ZHyw8erd6jd5Ta/JIEcdCheXFt9b1NYb93a1wmIIVW/2gLkombDg== + dependencies: + "@typescript-eslint/types" "6.10.0" + "@typescript-eslint/visitor-keys" "6.10.0" + "@typescript-eslint/scope-manager@6.12.0": version "6.12.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz#5833a16dbe19cfbad639d4d33bcca5e755c7044b" @@ -4188,21 +4196,13 @@ "@typescript-eslint/types" "6.12.0" "@typescript-eslint/visitor-keys" "6.12.0" -"@typescript-eslint/scope-manager@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-6.7.4.tgz#a484a17aa219e96044db40813429eb7214d7b386" - integrity sha512-SdGqSLUPTXAXi7c3Ob7peAGVnmMoGzZ361VswK2Mqf8UOYcODiYvs8rs5ILqEdfvX1lE7wEZbLyELCW+Yrql1A== - dependencies: - "@typescript-eslint/types" "6.7.4" - "@typescript-eslint/visitor-keys" "6.7.4" - -"@typescript-eslint/type-utils@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.7.4.tgz#847cd3b59baf948984499be3e0a12ff07373e321" - integrity sha512-n+g3zi1QzpcAdHFP9KQF+rEFxMb2KxtnJGID3teA/nxKHOVi3ylKovaqEzGBbVY2pBttU6z85gp0D00ufLzViQ== +"@typescript-eslint/type-utils@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-6.10.0.tgz#1007faede067c78bdbcef2e8abb31437e163e2e1" + integrity sha512-wYpPs3hgTFblMYwbYWPT3eZtaDOjbLyIYuqpwuLBBqhLiuvJ+9sEp2gNRJEtR5N/c9G1uTtQQL5AhV0fEPJYcg== dependencies: - "@typescript-eslint/typescript-estree" "6.7.4" - "@typescript-eslint/utils" "6.7.4" + "@typescript-eslint/typescript-estree" "6.10.0" + "@typescript-eslint/utils" "6.10.0" debug "^4.3.4" ts-api-utils "^1.0.1" @@ -4216,16 +4216,16 @@ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.54.0.tgz#7d519df01f50739254d89378e0dcac504cab2740" integrity sha512-nExy+fDCBEgqblasfeE3aQ3NuafBUxZxgxXcYfzYRZFHdVvk5q60KhCSkG0noHgHRo/xQ/BOzURLZAafFpTkmQ== +"@typescript-eslint/types@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.10.0.tgz#f4f0a84aeb2ac546f21a66c6e0da92420e921367" + integrity sha512-36Fq1PWh9dusgo3vH7qmQAj5/AZqARky1Wi6WpINxB6SkQdY5vQoT2/7rW7uBIsPDcvvGCLi4r10p0OJ7ITAeg== + "@typescript-eslint/types@6.12.0": version "6.12.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.12.0.tgz#ffc5297bcfe77003c8b7b545b51c2505748314ac" integrity sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q== -"@typescript-eslint/types@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-6.7.4.tgz#5d358484d2be986980c039de68e9f1eb62ea7897" - integrity sha512-o9XWK2FLW6eSS/0r/tgjAGsYasLAnOWg7hvZ/dGYSSNjCh+49k5ocPN8OmG5aZcSJ8pclSOyVKP2x03Sj+RrCA== - "@typescript-eslint/typescript-estree@4.33.0": version "4.33.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.33.0.tgz#0dfb51c2908f68c5c08d82aefeaf166a17c24609" @@ -4252,43 +4252,43 @@ semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/typescript-estree@6.12.0": - version "6.12.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz#764ccc32598549e5b48ec99e3b85f89b1385310c" - integrity 
sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw== +"@typescript-eslint/typescript-estree@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.10.0.tgz#667381eed6f723a1a8ad7590a31f312e31e07697" + integrity sha512-ek0Eyuy6P15LJVeghbWhSrBCj/vJpPXXR+EpaRZqou7achUWL8IdYnMSC5WHAeTWswYQuP2hAZgij/bC9fanBg== dependencies: - "@typescript-eslint/types" "6.12.0" - "@typescript-eslint/visitor-keys" "6.12.0" + "@typescript-eslint/types" "6.10.0" + "@typescript-eslint/visitor-keys" "6.10.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/typescript-estree@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.7.4.tgz#f2baece09f7bb1df9296e32638b2e1130014ef1a" - integrity sha512-ty8b5qHKatlNYd9vmpHooQz3Vki3gG+3PchmtsA4TgrZBKWHNjWfkQid7K7xQogBqqc7/BhGazxMD5vr6Ha+iQ== +"@typescript-eslint/typescript-estree@6.12.0": + version "6.12.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz#764ccc32598549e5b48ec99e3b85f89b1385310c" + integrity sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw== dependencies: - "@typescript-eslint/types" "6.7.4" - "@typescript-eslint/visitor-keys" "6.7.4" + "@typescript-eslint/types" "6.12.0" + "@typescript-eslint/visitor-keys" "6.12.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.5.4" ts-api-utils "^1.0.1" -"@typescript-eslint/utils@6.7.4": - version "6.7.4" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.7.4.tgz#2236f72b10e38277ee05ef06142522e1de470ff2" - integrity sha512-PRQAs+HUn85Qdk+khAxsVV+oULy3VkbH3hQ8hxLRJXWBEd7iI+GbQxH5SEUSH7kbEoTp6oT1bOwyga24ELALTA== +"@typescript-eslint/utils@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-6.10.0.tgz#4d76062d94413c30e402c9b0df8c14aef8d77336" + integrity sha512-v+pJ1/RcVyRc0o4wAGux9x42RHmAjIGzPRo538Z8M1tVx6HOnoQBCX/NoadHQlZeC+QO2yr4nNSFWOoraZCAyg== dependencies: "@eslint-community/eslint-utils" "^4.4.0" "@types/json-schema" "^7.0.12" "@types/semver" "^7.5.0" - "@typescript-eslint/scope-manager" "6.7.4" - "@typescript-eslint/types" "6.7.4" - "@typescript-eslint/typescript-estree" "6.7.4" + "@typescript-eslint/scope-manager" "6.10.0" + "@typescript-eslint/types" "6.10.0" + "@typescript-eslint/typescript-estree" "6.10.0" semver "^7.5.4" "@typescript-eslint/utils@^5.10.0": @@ -4334,6 +4334,14 @@ "@typescript-eslint/types" "5.54.0" eslint-visitor-keys "^3.3.0" +"@typescript-eslint/visitor-keys@6.10.0": + version "6.10.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.10.0.tgz#b9eaf855a1ac7e95633ae1073af43d451e8f84e3" + integrity sha512-xMGluxQIEtOM7bqFCo+rCMh5fqI+ZxV5RUUOa29iVPz1OgCZrtc7rFnz5cLUazlkPKYqX+75iuDq7m0HQ48nCg== + dependencies: + "@typescript-eslint/types" "6.10.0" + eslint-visitor-keys "^3.4.1" + "@typescript-eslint/visitor-keys@6.12.0": version "6.12.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz#5877950de42a0f3344261b7a1eee15417306d7e9" @@ -4342,14 +4350,6 @@ "@typescript-eslint/types" "6.12.0" eslint-visitor-keys "^3.4.1" -"@typescript-eslint/visitor-keys@6.7.4": - version "6.7.4" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-6.7.4.tgz#80dfecf820fc67574012375859085f91a4dff043" - integrity sha512-pOW37DUhlTZbvph50x5zZCkFn3xzwkGtNoJHzIM3svpiSkJzwOYr/kVBaXmf+RAQiUDs1AHEZVNPg6UJCJpwRA== - dependencies: - "@typescript-eslint/types" "6.7.4" - eslint-visitor-keys "^3.4.1" - "@ungap/structured-clone@^1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" @@ -5970,6 +5970,15 @@ define-data-property@^1.0.1: gopd "^1.0.1" has-property-descriptors "^1.0.0" +define-data-property@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -6076,6 +6085,20 @@ dir-glob@^3.0.1: dependencies: path-type "^4.0.0" +disposablestack@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/disposablestack/-/disposablestack-1.1.2.tgz#aa5e1713ee584f8fc936bc3b3227ffcbc2857bd8" + integrity sha512-th2rg6l1iN8YC0KiVjOImq7MEmCaAaJlIOPSlw5S1Ee2iCeFJ2NZbwHt14X3mu2sFlOkDkSzlVlZD+kL97ZtsQ== + dependencies: + define-properties "^1.2.1" + es-abstract "^1.22.1" + es-set-tostringtag "^2.0.1" + get-intrinsic "^1.2.1" + globalthis "^1.0.3" + has-symbols "^1.0.3" + internal-slot "^1.0.5" + suppressed-error "^1.0.2" + docker-modem@^3.0.0: version "3.0.6" resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.6.tgz#8c76338641679e28ec2323abb65b3276fb1ce597" @@ -12977,6 +13000,20 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== +suppressed-error@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/suppressed-error/-/suppressed-error-1.0.2.tgz#743ae974535b9ce6eb79c0f0b17c6db659b8baf5" + integrity sha512-DRCmKxbAr3MPoluOCMYsvTabZ3l1RsLCWELW/siVrWqDthyW4gq1vA5O87iCXJQYrcxgFTOJU125JAlDwmgPiw== + dependencies: + define-data-property "^1.1.0" + define-properties "^1.2.1" + es-abstract "^1.22.1" + function-bind "^1.1.1" + get-intrinsic "^1.2.1" + globalthis "^1.0.3" + has-property-descriptors "^1.0.0" + set-function-name "^2.0.1" + syncpack@^9.8.4: version "9.8.6" resolved "https://registry.yarnpkg.com/syncpack/-/syncpack-9.8.6.tgz#edc06ea03295773165fb5086bb2655be03e5f094" @@ -13439,7 +13476,7 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -typescript@^5.0.4: +typescript@^5.2.2: version "5.2.2" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==