From 4af513551b1e6b8ad8be08c03d9c135b0587f99d Mon Sep 17 00:00:00 2001 From: Pascal Bertschi Date: Wed, 4 Jan 2023 20:29:08 +0100 Subject: [PATCH 1/3] feat: allow generates to be exported --- examples/export/.hammerkit.yaml | 16 ++++++++ src/executer/docker-node.ts | 24 ++++++++++++ src/parser/build-file-task.ts | 1 + src/parser/parse-build-file.ts | 3 +- src/planner/utils/plan-work-node.ts | 51 +++++++++++++++----------- src/planner/work-node.ts | 2 + src/testing/integration/export.spec.ts | 22 +++++++++++ 7 files changed, 97 insertions(+), 22 deletions(-) create mode 100644 examples/export/.hammerkit.yaml create mode 100644 src/testing/integration/export.spec.ts diff --git a/examples/export/.hammerkit.yaml b/examples/export/.hammerkit.yaml new file mode 100644 index 00000000..d6ffabeb --- /dev/null +++ b/examples/export/.hammerkit.yaml @@ -0,0 +1,16 @@ +tasks: + example_file: + image: alpine + generates: + - path: test.txt + export: true + cmds: + - echo "hello" > test.txt + + example_dir: + image: alpine + generates: + - path: dist + export: true + cmds: + - echo "hello" > dist/test.txt diff --git a/src/executer/docker-node.ts b/src/executer/docker-node.ts index 344c76c5..04fbfbc1 100644 --- a/src/executer/docker-node.ts +++ b/src/executer/docker-node.ts @@ -12,6 +12,7 @@ import { Process } from './process' import { prepareMounts, prepareVolume, pullImage, setUserPermissions } from './execution-steps' import { usingContainer } from '../docker/using-container' import { printContainerOptions } from './print-container-options' +import { extract } from 'tar' function buildCreateOptions( node: ContainerWorkNode, @@ -126,6 +127,29 @@ export function dockerNode( } } + for (const generate of node.generates) { + if (!generate.export || generate.inherited || generate.isFile) { + continue + } + + const readable = await container.getArchive({ + path: generate.path, + }) + await environment.file.createDirectory(generate.path) + await new Promise((resolve, reject) => { + 
readable + .pipe( + extract({ + cwd: generate.path, + newer: true, + stripComponents: 1, + }) + ) + .on('close', () => resolve()) + .on('error', (err) => reject(err)) + }) + } + return true }) diff --git a/src/parser/build-file-task.ts b/src/parser/build-file-task.ts index 6f04620c..a277843d 100644 --- a/src/parser/build-file-task.ts +++ b/src/parser/build-file-task.ts @@ -26,4 +26,5 @@ export interface BuildFileTask { export interface BuildFileTaskGenerate { resetOnChange?: boolean path: string + export?: boolean } diff --git a/src/parser/parse-build-file.ts b/src/parser/parse-build-file.ts index d765333d..bf29b6d5 100644 --- a/src/parser/parse-build-file.ts +++ b/src/parser/parse-build-file.ts @@ -128,7 +128,8 @@ function parseGenerateArray( } else { return { path: parseString(ctx, `generate[${i}].path`, v.path, false), - resetOnChange: parseBoolean(ctx, `generate[${i}].resetOnChange`, v.resetOnChange, true) || false, + resetOnChange: parseBoolean(ctx, `generate[${i}].resetOnChange`, v.resetOnChange, true) ?? false, + export: parseBoolean(ctx, `generate[${i}].export`, v.export, true) ?? 
false, } } }) diff --git a/src/planner/utils/plan-work-node.ts b/src/planner/utils/plan-work-node.ts index 8e79437d..cde479b3 100644 --- a/src/planner/utils/plan-work-node.ts +++ b/src/planner/utils/plan-work-node.ts @@ -44,7 +44,7 @@ export interface PlannedTask { platform: BuildFileTaskPlatform | null description: string | null shell: string | null - generates: BuildFileTaskGenerate[] + generates: WorkNodeGenerate[] image: string | null mounts: string[] cmds: BuildTaskCommand[] @@ -118,11 +118,17 @@ export function planTask(workContext: WorkContext, buildTaskResult: BuildTaskRes } } -function mapGenerate(generate: string | BuildFileTaskGenerate): BuildFileTaskGenerate { +function mapGenerate(generate: string | BuildFileTaskGenerate): WorkNodeGenerate { if (typeof generate === 'string') { - return { path: generate, resetOnChange: false } + return { path: generate, resetOnChange: false, export: false, inherited: false, isFile: false } } else { - return { path: generate.path, resetOnChange: generate.resetOnChange ?? false } + return { + path: generate.path, + resetOnChange: generate.resetOnChange ?? false, + export: generate.export ?? 
false, + inherited: false, + isFile: false, + } } } @@ -197,6 +203,7 @@ export function mapLabels(labels: { [key: string]: string }): LabelValues { function parseWorkNode(id: string, task: PlannedTask, context: WorkContext): WorkNode { const name = [...context.namePrefix, task.name].join(':') + const generates = parseLocalWorkNodeGenerate(task, context, task.envs) const baseWorkNode: BaseWorkNode = { envs: task.envs, id, @@ -209,7 +216,7 @@ function parseWorkNode(id: string, task: PlannedTask, context: WorkContext): Wor buildFile: task.build, taskName: task.name, src: parseLocalWorkNodeSource(task, context, task.envs), - generates: parseLocalWorkNodeGenerate(task, context, task.envs), + generates, plannedTask: task, needs: parseWorkNodeNeeds(task.needs, context), labels: mapLabels(task.labels), @@ -217,7 +224,7 @@ function parseWorkNode(id: string, task: PlannedTask, context: WorkContext): Wor } if (task.image) { - const mounts = getContainerMounts(baseWorkNode, parseContainerWorkNodeMount(task, context, task.envs)) + const mounts = getContainerMounts(baseWorkNode, parseContainerWorkNodeMount(task, context, generates, task.envs)) return { ...baseWorkNode, type: 'container', @@ -247,19 +254,16 @@ export function parseContainerWorkNodePorts( function parseContainerWorkNodeMount( task: PlannedTask, context: WorkContext, + generates: WorkNodeGenerate[], envs: { [key: string]: string } | null ): WorkMount[] { const mounts = task.mounts.map((m) => templateValue(m, envs)).map((m) => parseWorkNodeMount(context.cwd, m)) - const fileGenerates = task.generates - .filter((g) => extname(g.path).length > 1) - .map((g) => { - const path = normalizePath(context.cwd, context.cwd, templateValue(g.path, envs)) - - return { - localPath: path, - containerPath: path, - } - }) + const fileGenerates = generates + .filter((g) => g.isFile) + .map((g) => ({ + localPath: g.path, + containerPath: g.path, + })) return [...mounts, ...fileGenerates] } @@ -268,11 +272,16 @@ function 
parseLocalWorkNodeGenerate( context: WorkContext, envs: { [key: string]: string } | null ): WorkNodeGenerate[] { - return task.generates.map((g) => ({ - path: join(context.cwd, templateValue(g.path, envs)), - resetOnChange: g.resetOnChange ?? false, - inherited: false, - })) + return task.generates.map((g) => { + const filePath = join(context.cwd, templateValue(g.path, envs)) + return { + path: filePath, + resetOnChange: g.resetOnChange, + export: g.export, + isFile: extname(g.path).length > 1, + inherited: g.inherited, + } + }) } function parseLocalWorkNodeSource( diff --git a/src/planner/work-node.ts b/src/planner/work-node.ts index ac11474a..ad7157fc 100644 --- a/src/planner/work-node.ts +++ b/src/planner/work-node.ts @@ -34,6 +34,8 @@ export interface WorkNodeGenerate { path: string inherited: boolean resetOnChange: boolean + export: boolean + isFile: boolean } export interface LocalWorkNode extends BaseWorkNode { diff --git a/src/testing/integration/export.spec.ts b/src/testing/integration/export.spec.ts new file mode 100644 index 00000000..74b88ca6 --- /dev/null +++ b/src/testing/integration/export.spec.ts @@ -0,0 +1,22 @@ +import { getTestSuite } from '../get-test-suite' +import { join } from 'path' + +describe('export', () => { + const suite = getTestSuite('export', ['.hammerkit.yaml']) + + afterAll(() => suite.close()) + + it('should export created file', async () => { + const { cli, environment } = await suite.setup({ taskName: 'example_file' }) + const result = await cli.exec() + expect(result.success).toBeTruthy() + expect(await environment.file.read(join(environment.cwd, 'test.txt'))).toEqual('hello\n') + }) + + it('should export created directory', async () => { + const { cli, environment } = await suite.setup({ taskName: 'example_dir' }) + const result = await cli.exec() + expect(result.success).toBeTruthy() + expect(await environment.file.read(join(environment.cwd, 'dist/test.txt'))).toEqual('hello\n') + }) +}) From 
f5ac5dec1f75cae0bef85a70d569ed523affe1e7 Mon Sep 17 00:00:00 2001 From: Pascal Bertschi Date: Sat, 7 Jan 2023 19:16:03 +0100 Subject: [PATCH 2/3] feat: add deps/needs to services --- examples/include/.hammerkit.yaml | 1 + examples/include/foo/build.yaml | 4 + examples/invalid_loop/.hammerkit.yaml | 13 + examples/reference/.hammerkit.yaml | 1 + examples/reference/foo/build.yaml | 4 + examples/services/.hammerkit.yaml | 20 + examples/services/server.js | 26 ++ src/executer/docker-node.ts | 20 +- src/executer/docker-service.ts | 11 +- src/executer/environment-mock.ts | 1 - src/executer/schedule.ts | 180 +++++--- src/executer/scheduler/check-for-loop.ts | 18 +- src/executer/scheduler/service-state.ts | 11 + src/executer/start-node.ts | 2 + src/index.ts | 1 - src/optimizer/get-work-node-cache-stats.ts | 2 +- src/optimizer/work-node-cache-description.ts | 2 +- src/parser/build-file-service.ts | 2 + src/parser/get-build-file.ts | 4 +- src/parser/parse-build-file-services.ts | 2 + src/parser/read-build-file.ts | 6 +- src/parser/read-env-file.ts | 2 +- src/planner/utils/assign-dependencies.ts | 50 +++ src/planner/utils/build-file-reference.ts | 8 + src/planner/utils/find-build-value.spec.ts | 23 ++ src/planner/utils/find-build-value.ts | 57 +++ src/planner/utils/get-container-user.ts | 7 + src/planner/utils/get-default-kube-config.ts | 6 + ...{get-container-mounts.ts => get-mounts.ts} | 2 +- src/planner/utils/map-generate.ts | 16 + src/planner/utils/map-labels.ts | 9 + src/planner/utils/map-source.ts | 10 + src/planner/utils/normalize-path.spec.ts | 18 + .../utils/parse-container-work-node-mount.ts | 22 + .../utils/parse-container-work-node-ports.ts | 13 + .../utils/parse-local-work-node-generate.ts | 22 + .../utils/parse-local-work-node-source.ts | 18 + ...mount.spec.ts => parse-work-mount.spec.ts} | 18 +- ...work-node-mount.ts => parse-work-mount.ts} | 2 +- src/planner/utils/parse-work-node-command.ts | 26 ++ src/planner/utils/parse-work-node-needs.ts | 83 ++++ 
src/planner/utils/parse-work-node.ts | 61 +++ src/planner/utils/parse-work-port.spec.ts | 17 + ...e-work-node-port.ts => parse-work-port.ts} | 4 +- src/planner/utils/parse-work-volume.spec.ts | 12 + ...service-volume.ts => parse-work-volume.ts} | 2 +- src/planner/utils/plan-task.ts | 69 ++++ src/planner/utils/plan-work-dependency.ts | 38 -- src/planner/utils/plan-work-node.ts | 383 +----------------- src/planner/utils/plan-work-nodes.ts | 2 +- src/planner/utils/plan-work-tree.ts | 2 +- src/planner/utils/planned-task.ts | 30 ++ src/planner/utils/split-name.spec.ts | 15 + src/planner/utils/split-name.ts | 6 +- src/planner/utils/template-value.spec.ts | 13 + src/planner/validate.ts | 23 +- src/planner/work-node-id.spec.ts | 7 +- src/planner/work-node-id.ts | 2 +- src/planner/work-node.ts | 6 +- .../{work-node-port.ts => work-port.ts} | 2 +- src/planner/work-service.ts | 7 +- src/testing/integration/invalid_loop.spec.ts | 6 + src/testing/integration/kubernetes.spec.ts | 4 + src/testing/integration/reference.spec.ts | 2 +- src/testing/integration/services.spec.ts | 9 +- 65 files changed, 940 insertions(+), 525 deletions(-) create mode 100644 examples/services/server.js create mode 100644 src/planner/utils/assign-dependencies.ts create mode 100644 src/planner/utils/build-file-reference.ts create mode 100644 src/planner/utils/find-build-value.spec.ts create mode 100644 src/planner/utils/find-build-value.ts create mode 100644 src/planner/utils/get-container-user.ts create mode 100644 src/planner/utils/get-default-kube-config.ts rename src/planner/utils/{get-container-mounts.ts => get-mounts.ts} (89%) create mode 100644 src/planner/utils/map-generate.ts create mode 100644 src/planner/utils/map-labels.ts create mode 100644 src/planner/utils/map-source.ts create mode 100644 src/planner/utils/normalize-path.spec.ts create mode 100644 src/planner/utils/parse-container-work-node-mount.ts create mode 100644 src/planner/utils/parse-container-work-node-ports.ts create mode 100644 
src/planner/utils/parse-local-work-node-generate.ts create mode 100644 src/planner/utils/parse-local-work-node-source.ts rename src/planner/utils/{parse-work-node-mount.spec.ts => parse-work-mount.spec.ts} (69%) rename src/planner/utils/{parse-work-node-mount.ts => parse-work-mount.ts} (88%) create mode 100644 src/planner/utils/parse-work-node-command.ts create mode 100644 src/planner/utils/parse-work-node-needs.ts create mode 100644 src/planner/utils/parse-work-node.ts create mode 100644 src/planner/utils/parse-work-port.spec.ts rename src/planner/utils/{parse-work-node-port.ts => parse-work-port.ts} (82%) create mode 100644 src/planner/utils/parse-work-volume.spec.ts rename src/planner/utils/{parse-work-service-volume.ts => parse-work-volume.ts} (85%) create mode 100644 src/planner/utils/plan-task.ts delete mode 100644 src/planner/utils/plan-work-dependency.ts create mode 100644 src/planner/utils/planned-task.ts create mode 100644 src/planner/utils/split-name.spec.ts create mode 100644 src/planner/utils/template-value.spec.ts rename src/planner/{work-node-port.ts => work-port.ts} (58%) diff --git a/examples/include/.hammerkit.yaml b/examples/include/.hammerkit.yaml index 212db592..93e3a8c0 100644 --- a/examples/include/.hammerkit.yaml +++ b/examples/include/.hammerkit.yaml @@ -2,6 +2,7 @@ tasks: example: description: run ref task deps: [foo:bar] + needs: [foo:bardb] cmds: - echo foo diff --git a/examples/include/foo/build.yaml b/examples/include/foo/build.yaml index ed87ab53..370bc556 100644 --- a/examples/include/foo/build.yaml +++ b/examples/include/foo/build.yaml @@ -1,3 +1,7 @@ +services: + bardb: + image: postgres:12-alpine + tasks: bar: description: example task diff --git a/examples/invalid_loop/.hammerkit.yaml b/examples/invalid_loop/.hammerkit.yaml index cec11e16..8d7efb33 100644 --- a/examples/invalid_loop/.hammerkit.yaml +++ b/examples/invalid_loop/.hammerkit.yaml @@ -1,4 +1,17 @@ +services: + foodb: + image: postgres:12-alpine + needs: [bardb] + 
bardb: + image: postgres:12-alpine + needs: [foodb] + tasks: + loopservice: + needs: [foodb] + cmds: + - echo bar + bar: deps: [foo] cmds: diff --git a/examples/reference/.hammerkit.yaml b/examples/reference/.hammerkit.yaml index c23250fa..089c1dde 100644 --- a/examples/reference/.hammerkit.yaml +++ b/examples/reference/.hammerkit.yaml @@ -2,6 +2,7 @@ tasks: example: description: example with dep deps: [foo:bar] + needs: [foo:bardb] cmds: - echo hammertime diff --git a/examples/reference/foo/build.yaml b/examples/reference/foo/build.yaml index 638847b1..e0cfff9d 100644 --- a/examples/reference/foo/build.yaml +++ b/examples/reference/foo/build.yaml @@ -1,3 +1,7 @@ +services: + bardb: + image: postgres:12-alpine + tasks: bar: description: ref task diff --git a/examples/services/.hammerkit.yaml b/examples/services/.hammerkit.yaml index 9cea13a1..0296d3af 100644 --- a/examples/services/.hammerkit.yaml +++ b/examples/services/.hammerkit.yaml @@ -1,4 +1,18 @@ services: + api: + image: node:16.6.0-alpine + deps: [install] + needs: [postgres] + labels: + stage: run + app: example + mounts: + - server.js + - config.json + cmd: node server.js + healthcheck: + cmd: "wget -qO- http://localhost:3000" + postgres: image: postgres:12-alpine labels: @@ -27,6 +41,12 @@ tasks: cmds: - npm install + test: + image: node:16.6.0-alpine + needs: [api] + cmds: + - wget -qO- http://api:3000 + api: image: node:16.6.0-alpine deps: [install] diff --git a/examples/services/server.js b/examples/services/server.js new file mode 100644 index 00000000..a9c21cfa --- /dev/null +++ b/examples/services/server.js @@ -0,0 +1,26 @@ +const { Client } = require('pg') +const { createServer } = require('http') +const config = require('./config.json') + +const client = new Client(`postgres://${config.dbUser}:${config.dbPassword}@${config.dbHost}:5432/${config.dbName}`) + +const server = createServer(async (req, res) => { + const result = await client.query('SELECT $1::text as message', ['Hello world!']) + 
res.writeHead(200) + res.end(result.rows[0].message) +}) + +async function main() { + await client.connect() + server.listen(3000) +} + +process.on('SIGINT', async function () { + server.close() + await client.end() +}) + +main().catch((err) => { + console.error(err) + process.exit(1) +}) diff --git a/src/executer/docker-node.ts b/src/executer/docker-node.ts index 04fbfbc1..53f2684e 100644 --- a/src/executer/docker-node.ts +++ b/src/executer/docker-node.ts @@ -13,15 +13,13 @@ import { prepareMounts, prepareVolume, pullImage, setUserPermissions } from './e import { usingContainer } from '../docker/using-container' import { printContainerOptions } from './print-container-options' import { extract } from 'tar' +import { WorkService } from '../planner/work-service' -function buildCreateOptions( - node: ContainerWorkNode, - serviceContainers: { [key: string]: ServiceDns } -): ContainerCreateOptions { +export function getNeedsNetwork(serviceContainers: { [key: string]: ServiceDns }, needs: WorkService[]) { const links: string[] = [] const hosts: string[] = [] - for (const need of node.needs) { + for (const need of needs) { const dns = serviceContainers[need.id] if (isHostServiceDns(dns)) { hosts.push(`${need.name}:${dns.host}`) @@ -33,6 +31,14 @@ function buildCreateOptions( links.push(`${dns.containerId}:${need.name}`) } } + return { links, hosts } +} + +function buildCreateOptions( + node: ContainerWorkNode, + serviceContainers: { [key: string]: ServiceDns } +): ContainerCreateOptions { + const network = getNeedsNetwork(serviceContainers, node.needs) return { Image: node.image, @@ -51,8 +57,8 @@ function buildCreateOptions( map[`${port.containerPort}/tcp`] = [{ HostPort: `${port.hostPort}` }] return map }, {}), - ExtraHosts: hosts, - Links: links, + ExtraHosts: network.hosts, + Links: network.links, AutoRemove: true, }, ExposedPorts: node.ports.reduce<{ [key: string]: Record }>((map, port) => { diff --git a/src/executer/docker-service.ts 
b/src/executer/docker-service.ts index 36d1c485..25468b9f 100644 --- a/src/executer/docker-service.ts +++ b/src/executer/docker-service.ts @@ -10,11 +10,14 @@ import { checkReadiness } from './check-readiness' import { Environment } from './environment' import { State } from './state' import { Process } from './process' -import { prepareMounts, prepareVolume, pullImage, setUserPermissions } from './execution-steps' +import { prepareMounts, prepareVolume, pullImage } from './execution-steps' +import { getNeedsNetwork } from './docker-node' +import { ServiceDns } from './service-dns' export function dockerService( service: ContainerWorkService, stateKey: string, + serviceContainers: { [key: string]: ServiceDns }, state: State, environment: Environment ): Process { @@ -33,6 +36,9 @@ export function dockerService( try { checkForAbort(abort.signal) + + const network = getNeedsNetwork(serviceContainers, service.needs) + status.write('debug', `create container with image ${service.image}`) container = await environment.docker.createContainer({ Image: service.image, @@ -43,7 +49,10 @@ export function dockerService( return map }, {}), Cmd: service.cmd ? service.cmd.split(' ') : undefined, + WorkingDir: service.cwd ? 
convertToPosixPath(service.cwd) : undefined, HostConfig: { + ExtraHosts: network.hosts, + Links: network.links, Binds: [ ...service.mounts.map((v) => `${v.localPath}:${convertToPosixPath(v.containerPath)}`), ...service.volumes.map((v) => `${v.name}:${convertToPosixPath(v.containerPath)}`), diff --git a/src/executer/environment-mock.ts b/src/executer/environment-mock.ts index 3e9c720f..828e5d15 100644 --- a/src/executer/environment-mock.ts +++ b/src/executer/environment-mock.ts @@ -1,6 +1,5 @@ import { statusConsole } from '../planner/work-node-status' import { Environment } from './environment' -import { Writable } from 'stream' import { getFileContext } from '../file/get-file-context' import { getContainerCli } from './execute-docker' import { consoleContext } from '../log' diff --git a/src/executer/schedule.ts b/src/executer/schedule.ts index bcc5dc09..450559c4 100644 --- a/src/executer/schedule.ts +++ b/src/executer/schedule.ts @@ -1,7 +1,7 @@ import { ServiceDns } from './service-dns' import { Environment } from './environment' import { SchedulerResult } from './scheduler/scheduler-result' -import { isContainerWorkService } from '../planner/work-service' +import { isContainerWorkService, WorkService } from '../planner/work-service' import { dockerService } from './docker-service' import { kubernetesService } from './kubernetes-service' import { isContainerWorkNode } from '../planner/work-node' @@ -12,6 +12,101 @@ import { ProcessManager } from './process-manager' import { logContext } from '../planner/work-node-status' import { startWatchProcesses } from '../start-watch-processes' import { startNode, startService } from './start-node' +import { iterateWorkNodes, iterateWorkServices } from '../planner/utils/plan-work-nodes' +import { SchedulerState } from './scheduler/scheduler-state' +import { NodeState } from './scheduler/node-state' +import { isServiceState, ServiceState } from './scheduler/service-state' + +function ensureNeeds( + nodeOrServiceState: 
NodeState | ServiceState, + needs: WorkService[], + processManager: ProcessManager, + state: State, + environment: Environment, + currentState: SchedulerState +): boolean { + const endedNeeds = needs + .map((need) => currentState.service[need.id]) + .filter((service) => service.type === 'end' || service.type === 'canceled') + if (endedNeeds.length > 0) { + if (isServiceState(nodeOrServiceState)) { + state.patchService({ + type: 'error', + service: nodeOrServiceState.service, + stateKey: nodeOrServiceState.stateKey, + errorMessage: endedNeeds + .map((n) => `service ${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) + .join(', '), + }) + } else { + state.patchNode( + { + type: 'error', + node: nodeOrServiceState.node, + stateKey: nodeOrServiceState.stateKey, + errorMessage: endedNeeds + .map((n) => `service ${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) + .join(', '), + }, + nodeOrServiceState.stateKey + ) + } + return false + } + + const pendingNeeds = needs + .map((need) => currentState.service[need.id]) + .filter((service) => service.type === 'pending') + + if (pendingNeeds.length > 0) { + for (const pendingNeed of pendingNeeds) { + if (!ensureNeeds(pendingNeed, pendingNeed.service.needs, processManager, state, environment, currentState)) { + continue + } + + state.patchService({ + type: 'starting', + service: pendingNeed.service, + stateKey: null, + }) + + const serviceContainers = getServiceContainers(currentState, pendingNeed.service.needs) + + processManager.background( + { + type: 'service', + name: pendingNeed.service.name, + id: pendingNeed.service.id + '-cache', + }, + async (abort) => { + await startService(pendingNeed.service, state, serviceContainers, environment, abort.signal) + } + ) + } + return false + } + + const hasNotReadyNeeds = needs.some((need) => currentState.service[need.id].type !== 'running') + if (hasNotReadyNeeds) { + return false + } + + return true +} + +export 
function getServiceContainers( + currentState: SchedulerState, + needs: WorkService[] +): { [key: string]: ServiceDns } { + const serviceContainers: { [key: string]: ServiceDns } = {} + for (const need of needs) { + const serviceState = currentState.service[need.id] + if (serviceState.type === 'running') { + serviceContainers[need.id] = serviceState.dns + } + } + return serviceContainers +} export async function schedule( processManager: ProcessManager, @@ -51,62 +146,11 @@ export async function schedule( } ) } else if (nodeState.type === 'ready') { - const endedNeeds = nodeState.node.needs - .map((need) => currentState.service[need.id]) - .filter((service) => service.type === 'end' || service.type === 'canceled') - if (endedNeeds.length > 0) { - state.patchNode( - { - type: 'error', - node: nodeState.node, - stateKey: nodeState.stateKey, - errorMessage: endedNeeds - .map((n) => `service ${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) - .join(', '), - }, - nodeState.stateKey - ) + if (!ensureNeeds(nodeState, nodeState.node.needs, processManager, state, environment, currentState)) { continue } - const pendingNeeds = nodeState.node.needs - .map((need) => currentState.service[need.id]) - .filter((service) => service.type === 'pending') - - if (pendingNeeds.length > 0) { - for (const pendingNeed of pendingNeeds) { - state.patchService({ - type: 'starting', - service: pendingNeed.service, - stateKey: null, - }) - - processManager.background( - { - type: 'service', - name: pendingNeed.service.name, - id: pendingNeed.service.id + '-cache', - }, - async (abort) => { - await startService(pendingNeed.service, state, environment, abort.signal) - } - ) - } - continue - } - - const hasNotReadyNeeds = nodeState.node.needs.some((need) => currentState.service[need.id].type !== 'running') - if (hasNotReadyNeeds) { - continue - } - - const serviceContainers: { [key: string]: ServiceDns } = {} - for (const need of nodeState.node.needs) { - const 
serviceState = currentState.service[need.id] - if (serviceState.type === 'running') { - serviceContainers[need.id] = serviceState.dns - } - } + const serviceContainers = getServiceContainers(currentState, nodeState.node.needs) state.patchNode( { @@ -130,6 +174,11 @@ export async function schedule( } for (const [serviceId, serviceState] of Object.entries(currentState.service)) { + const hasOpenDeps = serviceState.service.deps.some((dep) => currentState.node[dep.id].type !== 'completed') + if (hasOpenDeps) { + continue + } + if (serviceState.type === 'ready') { const ctx = logContext('service', serviceState.service) state.patchService({ @@ -137,10 +186,12 @@ export async function schedule( service: serviceState.service, stateKey: serviceState.stateKey, }) + + const serviceContainers = getServiceContainers(currentState, serviceState.service.needs) processManager.background( ctx, isContainerWorkService(serviceState.service) - ? dockerService(serviceState.service, serviceState.stateKey, state, environment) + ? 
dockerService(serviceState.service, serviceState.stateKey, serviceContainers, state, environment) : kubernetesService(serviceState.service, serviceState.stateKey, state, environment) ) } @@ -150,10 +201,27 @@ export async function schedule( } let hasNeed = false - for (const nodeState of Object.values(currentState.node)) { + for (const nodeState of iterateWorkNodes(currentState.node)) { if (nodeState.type === 'running' || nodeState.type === 'starting' || nodeState.type === 'pending') { if (nodeState.node.needs.some((n) => n.id === serviceId)) { hasNeed = true + break + } + } + } + + if (!hasNeed) { + for (const serviceState of iterateWorkServices(currentState.service)) { + if ( + serviceState.type === 'ready' || + serviceState.type === 'running' || + serviceState.type === 'starting' || + serviceState.type === 'pending' + ) { + if (serviceState.service.needs.some((n) => n.id === serviceId)) { + hasNeed = true + break + } } } } diff --git a/src/executer/scheduler/check-for-loop.ts b/src/executer/scheduler/check-for-loop.ts index 3c3a5c04..f9d2441e 100644 --- a/src/executer/scheduler/check-for-loop.ts +++ b/src/executer/scheduler/check-for-loop.ts @@ -1,10 +1,10 @@ import { SchedulerState } from './scheduler-state' -import { iterateWorkNodes } from '../../planner/utils/plan-work-nodes' -import { hasCycle } from '../../planner/validate' +import { iterateWorkNodes, iterateWorkServices } from '../../planner/utils/plan-work-nodes' +import { hasDependencyCycle, hasNeedCycle } from '../../planner/validate' export function checkForLoop(state: SchedulerState): void { for (const nodeState of iterateWorkNodes(state.node)) { - const cyclePath = hasCycle(nodeState.node, []) + const cyclePath = hasDependencyCycle(nodeState.node, []) if (cyclePath && cyclePath.length > 0) { const errorMessage = `task cycle detected ${cyclePath.map((n) => n.name).join(' -> ')}` state.node[nodeState.node.id] = { @@ -15,4 +15,16 @@ export function checkForLoop(state: SchedulerState): void { } } } + 
for (const serviceState of iterateWorkServices(state.service)) { + const cyclePath = hasNeedCycle(serviceState.service, []) + if (cyclePath && cyclePath.length > 0) { + const errorMessage = `service cycle detected ${cyclePath.map((n) => n.name).join(' -> ')}` + state.service[serviceState.service.id] = { + type: 'error', + service: serviceState.service, + stateKey: null, + errorMessage, + } + } + } } diff --git a/src/executer/scheduler/service-state.ts b/src/executer/scheduler/service-state.ts index 3f0eb544..3c02cbcd 100644 --- a/src/executer/scheduler/service-state.ts +++ b/src/executer/scheduler/service-state.ts @@ -1,5 +1,6 @@ import { WorkService } from '../../planner/work-service' import { ServiceDns } from '../service-dns' +import { NodeState } from './node-state' export interface ServicePendingState { type: 'pending' @@ -33,6 +34,13 @@ export interface ServiceEndState { stateKey: string } +export interface ServiceErrorState { + type: 'error' + service: WorkService + stateKey: string | null + errorMessage: string +} + export interface ServiceCanceledState { type: 'canceled' service: WorkService @@ -44,5 +52,8 @@ export type ServiceState = | ServiceStartingState | ServiceRunningState | ServiceReadyState + | ServiceErrorState | ServiceEndState | ServiceCanceledState + +export const isServiceState = (val: ServiceState | NodeState): val is ServiceState => 'service' in val diff --git a/src/executer/start-node.ts b/src/executer/start-node.ts index cd84a9b8..50857859 100644 --- a/src/executer/start-node.ts +++ b/src/executer/start-node.ts @@ -5,6 +5,7 @@ import { State } from './state' import { isContainerWorkService, WorkService } from '../planner/work-service' import { getServiceNodeCacheStats, getStateKey } from '../optimizer/get-work-node-cache-stats' import { NodeState } from './scheduler/node-state' +import { ServiceDns } from './service-dns' export async function startNode( node: NodeState, @@ -52,6 +53,7 @@ export async function startNode( export async 
function startService( service: WorkService, state: State, + serviceContainers: { [key: string]: ServiceDns }, environment: Environment, abortSignal: AbortSignal ): Promise { diff --git a/src/index.ts b/src/index.ts index 474025e5..c864b8bb 100644 --- a/src/index.ts +++ b/src/index.ts @@ -5,7 +5,6 @@ import { consoleContext } from './log' import { getFileContext } from './file/get-file-context' import { statusConsole } from './planner/work-node-status' import { getContainerCli } from './executer/execute-docker' -import { Writable } from 'stream' import { emptyWritable } from './utils/empty-writable' const abortCtrl = new AbortController() diff --git a/src/optimizer/get-work-node-cache-stats.ts b/src/optimizer/get-work-node-cache-stats.ts index 30537942..2ab7b8e0 100644 --- a/src/optimizer/get-work-node-cache-stats.ts +++ b/src/optimizer/get-work-node-cache-stats.ts @@ -4,7 +4,7 @@ import { WorkNodeCacheFileStats, WorkServiceCacheFileStats } from './work-node-c import { WorkNode } from '../planner/work-node' import { Environment } from '../executer/environment' import { StatusScopedConsole } from '../planner/work-node-status' -import { ContainerWorkService, isContainerWorkService, WorkService } from '../planner/work-service' +import { isContainerWorkService, WorkService } from '../planner/work-service' import { CacheMethod } from '../parser/cache-method' import { createHash } from 'crypto' diff --git a/src/optimizer/work-node-cache-description.ts b/src/optimizer/work-node-cache-description.ts index c24680e0..78fdface 100644 --- a/src/optimizer/work-node-cache-description.ts +++ b/src/optimizer/work-node-cache-description.ts @@ -1,6 +1,6 @@ import { WorkNodeCommand } from '../planner/work-node-command' import { platform } from 'os' -import { PlannedTask } from '../planner/utils/plan-work-node' +import { PlannedTask } from '../planner/utils/planned-task' export interface WorkNodeCacheDescription { cwd?: string diff --git a/src/parser/build-file-service.ts 
b/src/parser/build-file-service.ts index 1e8e2117..eb313bc1 100644 --- a/src/parser/build-file-service.ts +++ b/src/parser/build-file-service.ts @@ -4,6 +4,8 @@ export interface ExecutionBuildService { description: string | null ports: string[] | null mounts: string[] | null + deps: string[] | null + needs: string[] | null cmd: string | null volumes: string[] | null healthcheck: ExecutionBuildServiceHealthCheck | null diff --git a/src/parser/get-build-file.ts b/src/parser/get-build-file.ts index c2dcf958..deb4b561 100644 --- a/src/parser/get-build-file.ts +++ b/src/parser/get-build-file.ts @@ -2,6 +2,6 @@ import { readBuildFile } from './read-build-file' import { BuildFile } from './build-file' import { Environment } from '../executer/environment' -export function getBuildFile(fileName: string, context: Environment): Promise { - return readBuildFile(fileName, {}, context) +export function getBuildFile(fileName: string, environment: Environment): Promise { + return readBuildFile(fileName, {}, environment) } diff --git a/src/parser/parse-build-file-services.ts b/src/parser/parse-build-file-services.ts index ae32819f..256b9957 100644 --- a/src/parser/parse-build-file-services.ts +++ b/src/parser/parse-build-file-services.ts @@ -52,6 +52,8 @@ export function parseBuildFileServices( healthcheck: parseHealthcheck(ctx, serviceValue.healthcheck), labels: parseStringMap(ctx, 'labels', serviceValue.labels), cmd: parseString(ctx, 'cmd', serviceValue.cmd, true), + needs: parseStringArray(ctx, 'needs', serviceValue.needs), + deps: parseStringArray(ctx, 'deps', serviceValue.deps), unknownProps: Object.keys(serviceValue) .filter((k) => validKeys.indexOf(k) === -1) .reduce<{ [key: string]: any }>((map, k) => { diff --git a/src/parser/read-build-file.ts b/src/parser/read-build-file.ts index 6fc91add..7e5e6eb7 100644 --- a/src/parser/read-build-file.ts +++ b/src/parser/read-build-file.ts @@ -31,12 +31,12 @@ export async function read(fileName: string, context: Environment): Promise 
export async function readBuildFile( fileName: string, files: { [key: string]: BuildFile }, - context: Environment + environment: Environment ): Promise { if (files[fileName]) { return files[fileName] } - const input = await read(fileName, context) - return parseBuildFile(fileName, files, input, context) + const input = await read(fileName, environment) + return parseBuildFile(fileName, files, input, environment) } diff --git a/src/parser/read-env-file.ts b/src/parser/read-env-file.ts index edbf2627..a35e0441 100644 --- a/src/parser/read-env-file.ts +++ b/src/parser/read-env-file.ts @@ -26,7 +26,7 @@ export async function readEnvFile( const key = envVar.substr(0, index) const value = envVar.substr(index + 1) if (!envs[key]) { - environment.console.debug(`load env variable ${key} from ${directory} file`) + // environment.status..debug(`load env variable ${key} from ${directory} file`) envs[key] = value } } diff --git a/src/planner/utils/assign-dependencies.ts b/src/planner/utils/assign-dependencies.ts new file mode 100644 index 00000000..ef23ffb7 --- /dev/null +++ b/src/planner/utils/assign-dependencies.ts @@ -0,0 +1,50 @@ +import { isContainerWorkNode, isWorkNode, WorkNode } from '../work-node' +import { isContainerWorkService, WorkService } from '../work-service' +import { templateValue } from './template-value' +import { BuildFileReference } from './build-file-reference' +import { getWorkNode } from './plan-work-node' + +export function assignDependencies(deps: BuildFileReference[], node: WorkNode | WorkService): void { + const depNodes: WorkNode[] = [] + for (const dep of deps) { + const depName = templateValue(dep.name, dep.build.envs) + const depNode = getWorkNode(dep.context, { name: depName }) + if (!depNodes.some((d) => d.id === depNode.id)) { + depNodes.push(depNode) + } + } + + for (const depNode of depNodes) { + if (node.deps.some((d) => d.id === depNode.id)) { + continue + } + + node.deps.push(depNode) + + if (isWorkNode(node)) { + for (const src of 
depNode.src) { + if (node.src.indexOf(src) === -1) { + node.src.push(src) + + if (isContainerWorkNode(node)) { + node.mounts.push({ + localPath: src.absolutePath, + containerPath: src.absolutePath, + }) + } + } + } + } + + if (isContainerWorkNode(depNode) && (isContainerWorkNode(node) || isContainerWorkService(node))) { + for (const volume of depNode.volumes) { + if (!node.volumes.some((v) => v.name === volume.name)) { + node.volumes.push({ + ...volume, + inherited: true, + }) + } + } + } + } +} diff --git a/src/planner/utils/build-file-reference.ts b/src/planner/utils/build-file-reference.ts new file mode 100644 index 00000000..769a700c --- /dev/null +++ b/src/planner/utils/build-file-reference.ts @@ -0,0 +1,8 @@ +import { BuildFile } from '../../parser/build-file' +import { WorkContext } from '../work-context' + +export interface BuildFileReference { + build: BuildFile + name: string + context: WorkContext +} diff --git a/src/planner/utils/find-build-value.spec.ts b/src/planner/utils/find-build-value.spec.ts new file mode 100644 index 00000000..352f1434 --- /dev/null +++ b/src/planner/utils/find-build-value.spec.ts @@ -0,0 +1,23 @@ +import { findBuildService } from './find-build-value' +import { getBuildFile } from '../../parser/get-build-file' +import { join } from 'path' +import { environmentMock } from '../../executer/environment-mock' + +describe('find-build-value', () => { + const environment = environmentMock('/home/user/proj') + + it('should find service', async () => { + const buildFile = await getBuildFile(join(__dirname, '../../../examples/include/.hammerkit.yaml'), environment) + const svc = findBuildService( + { + build: buildFile, + cwd: '/home/user/proj', + workTree: { nodes: {}, services: {} }, + namePrefix: [], + }, + { name: 'foo:bardb' } + ) + expect(svc).toBeDefined() + expect(svc.result).toBeDefined() + }) +}) diff --git a/src/planner/utils/find-build-value.ts b/src/planner/utils/find-build-value.ts new file mode 100644 index 
00000000..89f4311a --- /dev/null +++ b/src/planner/utils/find-build-value.ts @@ -0,0 +1,57 @@ +import { createSubWorkContext, WorkContext } from '../work-context' +import { ExecutionBuildService } from '../../parser/build-file-service' +import { BuildFileTask } from '../../parser/build-file-task' +import { BuildFile } from '../../parser/build-file' +import { splitName } from './split-name' + +export type BuildTaskResult = { result: T; name: string; context: WorkContext } +export type BuildFileNameSelector = { name: string } + +export function findBuildService( + context: WorkContext, + selector: BuildFileNameSelector +): BuildTaskResult { + return findBuildValue(context, selector, (build, name) => build.services[name]) +} + +export function findBuildTask(context: WorkContext, selector: BuildFileNameSelector): BuildTaskResult { + return findBuildValue(context, selector, (build, name) => build.tasks[name]) +} + +function findBuildValue( + context: WorkContext, + selector: BuildFileNameSelector, + resolver: (buildFile: BuildFile, name: string) => T +): BuildTaskResult { + const result = resolver(context.build, selector.name) + if (result) { + return { result, context, name: selector.name } + } else { + const ref = splitName(selector.name) + if (ref.prefix) { + if (context.build.references[ref.prefix]) { + return findBuildValue( + createSubWorkContext(context, { + name: ref.prefix, + type: 'references', + }), + { name: ref.name }, + resolver + ) + } else if (context.build.includes[ref.prefix]) { + return findBuildValue( + createSubWorkContext(context, { + name: ref.prefix, + type: 'includes', + }), + { + name: ref.name, + }, + resolver + ) + } + } + + throw new Error(`unable to find ${selector.name} in ${context.build.path}`) + } +} diff --git a/src/planner/utils/get-container-user.ts b/src/planner/utils/get-container-user.ts new file mode 100644 index 00000000..4c562695 --- /dev/null +++ b/src/planner/utils/get-container-user.ts @@ -0,0 +1,7 @@ +import { platform } 
from 'os' + +export function getContainerUser(): string | null { + return platform() === 'linux' || platform() === 'freebsd' || platform() === 'openbsd' || platform() === 'sunos' + ? `${process.getuid()}:${process.getgid()}` + : null +} diff --git a/src/planner/utils/get-default-kube-config.ts b/src/planner/utils/get-default-kube-config.ts new file mode 100644 index 00000000..4826936c --- /dev/null +++ b/src/planner/utils/get-default-kube-config.ts @@ -0,0 +1,6 @@ +import { join } from 'path' +import { homedir } from 'os' + +export function getDefaultKubeConfig(): string { + return join(homedir(), '.kube/config') +} diff --git a/src/planner/utils/get-container-mounts.ts b/src/planner/utils/get-mounts.ts similarity index 89% rename from src/planner/utils/get-container-mounts.ts rename to src/planner/utils/get-mounts.ts index b5b15c63..8af6184a 100644 --- a/src/planner/utils/get-container-mounts.ts +++ b/src/planner/utils/get-mounts.ts @@ -1,7 +1,7 @@ import { BaseWorkNode } from '../work-node' import { WorkMount } from '../work-mount' -export function getContainerMounts(task: BaseWorkNode, workMount: WorkMount[]): WorkMount[] { +export function getMounts(task: BaseWorkNode, workMount: WorkMount[]): WorkMount[] { const result: WorkMount[] = [ ...task.src.map((source) => ({ localPath: source.absolutePath, diff --git a/src/planner/utils/map-generate.ts b/src/planner/utils/map-generate.ts new file mode 100644 index 00000000..2bae6b6c --- /dev/null +++ b/src/planner/utils/map-generate.ts @@ -0,0 +1,16 @@ +import { BuildFileTaskGenerate } from '../../parser/build-file-task' +import { WorkNodeGenerate } from '../work-node' + +export function mapGenerate(generate: string | BuildFileTaskGenerate): WorkNodeGenerate { + if (typeof generate === 'string') { + return { path: generate, resetOnChange: false, export: false, inherited: false, isFile: false } + } else { + return { + path: generate.path, + resetOnChange: generate.resetOnChange ?? false, + export: generate.export ?? 
false, + inherited: false, + isFile: false, + } + } +} diff --git a/src/planner/utils/map-labels.ts b/src/planner/utils/map-labels.ts new file mode 100644 index 00000000..d172cd8d --- /dev/null +++ b/src/planner/utils/map-labels.ts @@ -0,0 +1,9 @@ +import { LabelValues } from '../../executer/label-values' + +export function mapLabels(labels: { [key: string]: string }): LabelValues { + const result: LabelValues = {} + for (const [key, value] of Object.entries(labels)) { + result[key] = [value] + } + return result +} diff --git a/src/planner/utils/map-source.ts b/src/planner/utils/map-source.ts new file mode 100644 index 00000000..03b7327e --- /dev/null +++ b/src/planner/utils/map-source.ts @@ -0,0 +1,10 @@ +import { BuildFileTaskSource } from '../../parser/build-file-task-source' +import { WorkNodeSource } from '../work-node-source' +import { join } from 'path' + +export function mapSource(src: BuildFileTaskSource, workDir: string): WorkNodeSource { + return { + matcher: src.matcher, + absolutePath: join(workDir, src.relativePath), + } +} diff --git a/src/planner/utils/normalize-path.spec.ts b/src/planner/utils/normalize-path.spec.ts new file mode 100644 index 00000000..b87eab53 --- /dev/null +++ b/src/planner/utils/normalize-path.spec.ts @@ -0,0 +1,18 @@ +import { normalizePath } from './normalize-path' + +describe('normalize-path', () => { + it('should append src to cwd', () => { + const result = normalizePath('/home/user/proj', '/home/user', 'src') + expect(result).toEqual('/home/user/proj/src') + }) + + it('should append .kube to pwd', () => { + const result = normalizePath('/home/user/proj', '/home/user', '$PWD/.kube') + expect(result).toEqual('/home/user/.kube') + }) + + it('should append nothing to /usr/bin', () => { + const result = normalizePath('/home/user/proj', '/home/user', '/usr/bin') + expect(result).toEqual('/usr/bin') + }) +}) diff --git a/src/planner/utils/parse-container-work-node-mount.ts b/src/planner/utils/parse-container-work-node-mount.ts new 
file mode 100644 index 00000000..aae8ef89 --- /dev/null +++ b/src/planner/utils/parse-container-work-node-mount.ts @@ -0,0 +1,22 @@ +import { WorkContext } from '../work-context' +import { WorkNodeGenerate } from '../work-node' +import { WorkMount } from '../work-mount' +import { templateValue } from './template-value' +import { parseWorkMount } from './parse-work-mount' +import { PlannedTask } from './planned-task' + +export function parseContainerWorkNodeMount( + task: PlannedTask, + context: WorkContext, + generates: WorkNodeGenerate[], + envs: { [key: string]: string } | null +): WorkMount[] { + const mounts = task.mounts.map((m) => templateValue(m, envs)).map((m) => parseWorkMount(context.cwd, m)) + const fileGenerates = generates + .filter((g) => g.isFile) + .map((g) => ({ + localPath: g.path, + containerPath: g.path, + })) + return [...mounts, ...fileGenerates] +} diff --git a/src/planner/utils/parse-container-work-node-ports.ts b/src/planner/utils/parse-container-work-node-ports.ts new file mode 100644 index 00000000..0a9adf7d --- /dev/null +++ b/src/planner/utils/parse-container-work-node-ports.ts @@ -0,0 +1,13 @@ +import { WorkContext } from '../work-context' +import { WorkPort } from '../work-port' +import { templateValue } from './template-value' +import { parseWorkPort } from './parse-work-port' +import { PlannedTask } from './planned-task' + +export function parseContainerWorkNodePorts( + task: PlannedTask, + context: WorkContext, + envs: { [key: string]: string } | null +): WorkPort[] { + return task.ports.map((m) => templateValue(m, envs)).map((m) => parseWorkPort(m)) +} diff --git a/src/planner/utils/parse-local-work-node-generate.ts b/src/planner/utils/parse-local-work-node-generate.ts new file mode 100644 index 00000000..22532554 --- /dev/null +++ b/src/planner/utils/parse-local-work-node-generate.ts @@ -0,0 +1,22 @@ +import { WorkContext } from '../work-context' +import { WorkNodeGenerate } from '../work-node' +import { extname, join } from 
'path' +import { templateValue } from './template-value' +import { PlannedTask } from './planned-task' + +export function parseLocalWorkNodeGenerate( + task: PlannedTask, + context: WorkContext, + envs: { [key: string]: string } | null +): WorkNodeGenerate[] { + return task.generates.map((g) => { + const filePath = join(context.cwd, templateValue(g.path, envs)) + return { + path: filePath, + resetOnChange: g.resetOnChange, + export: g.export, + isFile: extname(g.path).length > 1, + inherited: g.inherited, + } + }) +} diff --git a/src/planner/utils/parse-local-work-node-source.ts b/src/planner/utils/parse-local-work-node-source.ts new file mode 100644 index 00000000..3173cc96 --- /dev/null +++ b/src/planner/utils/parse-local-work-node-source.ts @@ -0,0 +1,18 @@ +import { WorkContext } from '../work-context' +import { WorkNodeSource } from '../work-node-source' +import { templateValue } from './template-value' +import { mapSource } from './map-source' +import { PlannedTask } from './planned-task' + +export function parseLocalWorkNodeSource( + task: PlannedTask, + context: WorkContext, + envs: { [key: string]: string } | null +): WorkNodeSource[] { + return task.src + .map((src) => ({ + relativePath: templateValue(src.relativePath, envs), + matcher: src.matcher, + })) + .map((src) => mapSource(src, context.cwd)) +} diff --git a/src/planner/utils/parse-work-node-mount.spec.ts b/src/planner/utils/parse-work-mount.spec.ts similarity index 69% rename from src/planner/utils/parse-work-node-mount.spec.ts rename to src/planner/utils/parse-work-mount.spec.ts index 30ad4ed8..8bc40489 100644 --- a/src/planner/utils/parse-work-node-mount.spec.ts +++ b/src/planner/utils/parse-work-mount.spec.ts @@ -1,4 +1,4 @@ -import { parseWorkNodeMount } from './parse-work-node-mount' +import { parseWorkMount } from './parse-work-mount' import { join, sep, posix } from 'path' import { homedir } from 'os' @@ -8,56 +8,56 @@ function normalizePath(val: string): string { 
describe('parse-work-node-mount', () => { it('should parse "subdir"', () => { - expect(parseWorkNodeMount('/home/test', 'subdir')).toEqual({ + expect(parseWorkMount('/home/test', 'subdir')).toEqual({ localPath: normalizePath('/home/test/subdir'), containerPath: normalizePath('/home/test/subdir'), }) }) it('should parse "./subdir"', () => { - expect(parseWorkNodeMount('/home/test', './subdir')).toEqual({ + expect(parseWorkMount('/home/test', './subdir')).toEqual({ localPath: normalizePath('/home/test/subdir'), containerPath: normalizePath('/home/test/subdir'), }) }) it('should parse "./subdir:./otherdir"', () => { - expect(parseWorkNodeMount('/home/test', './subdir:./otherdir')).toEqual({ + expect(parseWorkMount('/home/test', './subdir:./otherdir')).toEqual({ localPath: normalizePath('/home/test/subdir'), containerPath: normalizePath('/home/test/otherdir'), }) }) it('should parse "$PWD/subdir:/subdir"', () => { - expect(parseWorkNodeMount('/home/test', '$PWD/subdir:/subdir')).toEqual({ + expect(parseWorkMount('/home/test', '$PWD/subdir:/subdir')).toEqual({ localPath: join(homedir(), 'subdir'), containerPath: '/subdir', }) }) it('should parse "$PWD/subdir:$PWD/subdir"', () => { - expect(parseWorkNodeMount('/home/test', '$PWD/subdir:$PWD/subdir')).toEqual({ + expect(parseWorkMount('/home/test', '$PWD/subdir:$PWD/subdir')).toEqual({ localPath: join(homedir(), 'subdir'), containerPath: normalizePath('/home/test/subdir'), }) }) it('should parse "/subdir:/otherdir"', () => { - expect(parseWorkNodeMount('/home/test', '/subdir:/otherdir')).toEqual({ + expect(parseWorkMount('/home/test', '/subdir:/otherdir')).toEqual({ localPath: '/subdir', containerPath: '/otherdir', }) }) it('should parse "/subdir:otherdir"', () => { - expect(parseWorkNodeMount('/home/test', '/subdir:otherdir')).toEqual({ + expect(parseWorkMount('/home/test', '/subdir:otherdir')).toEqual({ localPath: '/subdir', containerPath: normalizePath('/home/test/otherdir'), }) }) it('should parse "subdir:/otherdir"', 
() => { - expect(parseWorkNodeMount('/home/test', 'subdir:/otherdir')).toEqual({ + expect(parseWorkMount('/home/test', 'subdir:/otherdir')).toEqual({ localPath: normalizePath('/home/test/subdir'), containerPath: '/otherdir', }) diff --git a/src/planner/utils/parse-work-node-mount.ts b/src/planner/utils/parse-work-mount.ts similarity index 88% rename from src/planner/utils/parse-work-node-mount.ts rename to src/planner/utils/parse-work-mount.ts index d3522d1c..ca02f284 100644 --- a/src/planner/utils/parse-work-node-mount.ts +++ b/src/planner/utils/parse-work-mount.ts @@ -2,7 +2,7 @@ import { homedir } from 'os' import { normalizePath } from './normalize-path' import { WorkMount } from '../work-mount' -export function parseWorkNodeMount(cwd: string, dir: string): WorkMount { +export function parseWorkMount(cwd: string, dir: string): WorkMount { const parts = dir.split(':') if (parts.length === 1) { return parseLocalMount(cwd, dir, dir) diff --git a/src/planner/utils/parse-work-node-command.ts b/src/planner/utils/parse-work-node-command.ts new file mode 100644 index 00000000..0b270f5c --- /dev/null +++ b/src/planner/utils/parse-work-node-command.ts @@ -0,0 +1,26 @@ +import { WorkContext } from '../work-context' +import { WorkNodeCommand } from '../work-node-command' +import { planWorkCommand } from './plan-work-command' +import { templateValue } from './template-value' +import { PlannedTask } from './planned-task' + +export function parseWorkNodeCommand( + task: PlannedTask, + context: WorkContext, + envs: { [key: string]: string } | null +): WorkNodeCommand[] { + return planWorkCommand( + task.cmds.map((cmd) => { + if (typeof cmd === 'string') { + return templateValue(cmd, envs) + } else { + return { + cmd: templateValue(cmd.cmd, envs), + path: templateValue(cmd.path, envs), + type: cmd.type, + } + } + }), + context.cwd + ) +} diff --git a/src/planner/utils/parse-work-node-needs.ts b/src/planner/utils/parse-work-node-needs.ts new file mode 100644 index 
00000000..500d0e01 --- /dev/null +++ b/src/planner/utils/parse-work-node-needs.ts @@ -0,0 +1,83 @@ +import { WorkContext } from '../work-context' +import { BaseWorkService, WorkService } from '../work-service' +import { getWorkServiceId } from '../work-service-id' +import { templateValue } from './template-value' +import { parseWorkPort } from './parse-work-port' +import { parseWorkMount } from './parse-work-mount' +import { parseWorkVolume } from './parse-work-volume' +import { getDefaultKubeConfig } from './get-default-kube-config' +import { BuildFileNameSelector, findBuildService } from './find-build-value' +import { ExecutionBuildService } from '../../parser/build-file-service' +import { assignDependencies } from './assign-dependencies' + +export function getWorkService(context: WorkContext, selector: BuildFileNameSelector): WorkService { + const service = findBuildService(context, selector) + const id = getWorkServiceId(service.context.build, service.result) + + if (context.workTree.services[id]) { + return context.workTree.services[id] + } + + const value = parseWorkNodeNeeds(id, service.name, service.result, service.context) + context.workTree.services[id] = value + + assignDependencies( + (service.result.deps || []).map((dep) => ({ + name: dep, + context: service.context, + build: service.context.build, + })), + value + ) + + for (const need of service.result.needs || []) { + value.needs.push(getWorkService(service.context, { name: need })) + } + + return value +} + +export function parseWorkNodeNeeds( + id: string, + name: string, + service: ExecutionBuildService, + context: WorkContext +): WorkService { + const workService: BaseWorkService = { + id, + buildService: service, + name: [...context.namePrefix, name].join(':'), + description: service.description, + ports: (service.ports || []).map((m) => templateValue(m, service.envs)).map((m) => parseWorkPort(m)), + needs: [], + deps: [], + } + if (service.image) { + return { + ...workService, + type: 
'container-service', + cmd: service.cmd, + envs: service.envs || {}, + image: service.image, + cwd: service.cmd ? context.cwd : null, + // user: getContainerUser(), + healthcheck: service.healthcheck, + mounts: (service.mounts || []) + .map((m) => templateValue(m, service.envs)) + .map((m) => parseWorkMount(context.build.path, m)), + volumes: (service.volumes || []) + .map((m) => templateValue(m, service.envs)) + .map((m) => parseWorkVolume(context.build.path, m)), + } + } else if (!!service.context && !!service.selector) { + return { + ...workService, + context: service.context, + selector: service.selector, + kubeconfig: service.kubeconfig ?? getDefaultKubeConfig(), + type: 'kubernetes-service', + } + } else { + throw new Error(`unknown service ${name}`) + } +} diff --git a/src/planner/utils/parse-work-node.ts b/src/planner/utils/parse-work-node.ts new file mode 100644 index 00000000..b586a9e0 --- /dev/null +++ b/src/planner/utils/parse-work-node.ts @@ -0,0 +1,61 @@ +import { PlannedTask } from './planned-task' +import { WorkContext } from '../work-context' +import { BaseWorkNode, WorkNode } from '../work-node' +import { parseLocalWorkNodeGenerate } from './parse-local-work-node-generate' +import { templateValue } from './template-value' +import { parseWorkNodeCommand } from './parse-work-node-command' +import { parseLocalWorkNodeSource } from './parse-local-work-node-source' +import { mapLabels } from './map-labels' +import { getMounts } from './get-mounts' +import { parseContainerWorkNodeMount } from './parse-container-work-node-mount' +import { getContainerUser } from './get-container-user' +import { parseContainerWorkNodePorts } from './parse-container-work-node-ports' +import { getContainerVolumes } from './plan-work-volume' +import { getWorkService } from './parse-work-node-needs' + +export function parseWorkNode(id: string, task: PlannedTask, context: WorkContext): WorkNode { + const name = [...context.namePrefix, task.name].join(':') + + const generates = 
parseLocalWorkNodeGenerate(task, context, task.envs) + const baseWorkNode: BaseWorkNode = { + envs: task.envs, + id, + description: templateValue(task.description, task.envs), + continuous: task.continuous, + name, + cwd: task.cwd, + cmds: parseWorkNodeCommand(task, context, task.envs), + deps: [], + buildFile: task.build, + taskName: task.name, + src: parseLocalWorkNodeSource(task, context, task.envs), + generates, + plannedTask: task, + needs: [], + labels: mapLabels(task.labels), + caching: task.cache ?? null, + } + + for (const service of task.needs) { + baseWorkNode.needs.push(getWorkService(service.context, { name: service.name })) + } + + if (task.image) { + const mounts = getMounts(baseWorkNode, parseContainerWorkNodeMount(task, context, generates, task.envs)) + return { + ...baseWorkNode, + type: 'container', + user: getContainerUser(), + image: templateValue(task.image, task.envs), + shell: templateValue(task.shell, task.envs) || '/bin/sh', + mounts, + ports: parseContainerWorkNodePorts(task, context, task.envs), + volumes: getContainerVolumes(baseWorkNode, mounts), + } + } else { + return { + ...baseWorkNode, + type: 'local', + } + } +} diff --git a/src/planner/utils/parse-work-port.spec.ts b/src/planner/utils/parse-work-port.spec.ts new file mode 100644 index 00000000..44a2eddf --- /dev/null +++ b/src/planner/utils/parse-work-port.spec.ts @@ -0,0 +1,17 @@ +import { parseWorkPort } from './parse-work-port' + +describe('parse-work-port', () => { + it('should parse 5432', () => { + expect(parseWorkPort('5432')).toEqual({ + hostPort: 5432, + containerPort: 5432, + }) + }) + + it('should parse 5432:5433', () => { + expect(parseWorkPort('5432:5433')).toEqual({ + hostPort: 5432, + containerPort: 5433, + }) + }) +}) diff --git a/src/planner/utils/parse-work-node-port.ts b/src/planner/utils/parse-work-port.ts similarity index 82% rename from src/planner/utils/parse-work-node-port.ts rename to src/planner/utils/parse-work-port.ts index 7c8a1ac9..b52c2e75 100644 
--- a/src/planner/utils/parse-work-node-port.ts +++ b/src/planner/utils/parse-work-port.ts @@ -1,6 +1,6 @@ -import { WorkNodePort } from '../work-node-port' +import { WorkPort } from '../work-port' -export function parseWorkNodePort(port: string): WorkNodePort { +export function parseWorkPort(port: string): WorkPort { const parts = port.split(':') if (parts.length === 1) { const port = parsePort(parts[0]) diff --git a/src/planner/utils/parse-work-volume.spec.ts b/src/planner/utils/parse-work-volume.spec.ts new file mode 100644 index 00000000..dccf79da --- /dev/null +++ b/src/planner/utils/parse-work-volume.spec.ts @@ -0,0 +1,12 @@ +import { parseWorkVolume } from './parse-work-volume' + +describe('parse-work-volume', () => { + it('should parse projdata:/usr/data', () => { + expect(parseWorkVolume('/home/user/proj', 'projdata:/usr/data')).toEqual({ + name: 'projdata', + containerPath: '/usr/data', + resetOnChange: false, + inherited: false, + }) + }) +}) diff --git a/src/planner/utils/parse-work-service-volume.ts b/src/planner/utils/parse-work-volume.ts similarity index 85% rename from src/planner/utils/parse-work-service-volume.ts rename to src/planner/utils/parse-work-volume.ts index c450fa55..22fa17e3 100644 --- a/src/planner/utils/parse-work-service-volume.ts +++ b/src/planner/utils/parse-work-volume.ts @@ -1,7 +1,7 @@ import { normalizePath } from './normalize-path' import { WorkVolume } from '../work-volume' -export function parseWorkServiceVolume(cwd: string, dir: string): WorkVolume { +export function parseWorkVolume(cwd: string, dir: string): WorkVolume { const parts = dir.split(':') if (parts.length === 2) { return parseVolume(cwd, parts[0], parts[1]) diff --git a/src/planner/utils/plan-task.ts b/src/planner/utils/plan-task.ts new file mode 100644 index 00000000..ab44a857 --- /dev/null +++ b/src/planner/utils/plan-task.ts @@ -0,0 +1,69 @@ +import { WorkContext } from '../work-context' +import { BuildTaskResult, findBuildTask } from './find-build-value' 
+import { BuildFileTask } from '../../parser/build-file-task' +import { PlannedTask } from './planned-task' +import { mapGenerate } from './map-generate' +import { BuildFileReference } from './build-file-reference' + +export function planTask(workContext: WorkContext, buildTaskResult: BuildTaskResult): PlannedTask { + let extendedTask: BuildTaskResult | null = null + + if (buildTaskResult.result.extend) { + extendedTask = findBuildTask(workContext, { name: buildTaskResult.result.extend }) + if (extendedTask.result.extend) { + throw new Error(`nested extend ${extendedTask.name} is not allowed for task ${buildTaskResult.name}`) + } + } + + const envs = { + ...(extendedTask?.result?.envs || {}), + ...buildTaskResult.context.build.envs, + ...(buildTaskResult.result.envs || {}), + } + + const getReferences = (task: BuildTaskResult | null, prop: 'needs' | 'deps'): BuildFileReference[] => { + if (!task) { + return [] + } + + const value = task.result[prop] + if (!value) { + return [] + } + + return value.map((d) => ({ + name: d, + build: buildTaskResult.context.build, + context: task.context, + })) + } + + const mergeReferences = (first: BuildFileReference[], second: BuildFileReference[]): BuildFileReference[] => { + return [...first, ...second] + } + + return { + buildTask: buildTaskResult.result, + build: buildTaskResult.context.build, + name: buildTaskResult.name, + cache: buildTaskResult.result.cache ?? extendedTask?.result?.cache ?? null, + description: buildTaskResult.result.description ?? extendedTask?.result?.description ?? null, + cwd: workContext.cwd, + image: buildTaskResult.result.image ?? extendedTask?.result?.image ?? null, + platform: buildTaskResult.result.platform ?? extendedTask?.result?.platform ?? null, + mounts: buildTaskResult.result.mounts || extendedTask?.result?.mounts || [], + generates: (buildTaskResult.result.generates || extendedTask?.result?.generates || []).map((g) => mapGenerate(g)), + shell: buildTaskResult.result.shell ?? 
extendedTask?.result?.shell ?? null, + ports: buildTaskResult.result.ports || extendedTask?.result?.ports || [], + src: buildTaskResult.result.src || extendedTask?.result?.src || [], + cmds: buildTaskResult.result.cmds || extendedTask?.result?.cmds || [], + continuous: buildTaskResult.result.continuous ?? extendedTask?.result?.continuous ?? false, + labels: { + ...(extendedTask?.result?.labels || {}), + ...(buildTaskResult.result.labels || {}), + }, + envs, + deps: mergeReferences(getReferences(extendedTask, 'deps'), getReferences(buildTaskResult, 'deps')), + needs: mergeReferences(getReferences(extendedTask, 'needs'), getReferences(buildTaskResult, 'needs')), + } +} diff --git a/src/planner/utils/plan-work-dependency.ts b/src/planner/utils/plan-work-dependency.ts deleted file mode 100644 index eab1fb0c..00000000 --- a/src/planner/utils/plan-work-dependency.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { isContainerWorkNode, WorkNode } from '../work-node' -import { getVolumeName } from './plan-work-volume' - -export function planWorkDependency(deps: WorkNode[], node: WorkNode): void { - for (const depNode of deps) { - if (node.deps.some((d) => d.id === depNode.id)) { - continue - } - - node.deps.push(depNode) - - for (const src of depNode.src) { - if (node.src.indexOf(src) === -1) { - node.src.push(src) - - if (isContainerWorkNode(node)) { - node.mounts.push({ - localPath: src.absolutePath, - containerPath: src.absolutePath, - }) - } - } - } - - if (isContainerWorkNode(depNode) && isContainerWorkNode(node)) { - for (const volume of depNode.volumes) { - if (!node.volumes.some((v) => v.name === volume.name)) { - node.volumes.push({ - ...volume, - inherited: true, - }) - } - } - } - - planWorkDependency(depNode.deps, node) - } -} diff --git a/src/planner/utils/plan-work-node.ts b/src/planner/utils/plan-work-node.ts index cde479b3..88e5015d 100644 --- a/src/planner/utils/plan-work-node.ts +++ b/src/planner/utils/plan-work-node.ts @@ -1,172 +1,12 @@ -import { BuildFile } 
from '../../parser/build-file' -import { BaseWorkNode, WorkNode, WorkNodeGenerate } from '../work-node' -import { createSubWorkContext, WorkContext } from '../work-context' -import { templateValue } from './template-value' -import { planWorkCommand } from './plan-work-command' -import { splitName } from './split-name' -import { parseWorkNodeMount } from './parse-work-node-mount' -import { planWorkDependency } from './plan-work-dependency' -import { extname, join } from 'path' -import { BuildFileTaskSource } from '../../parser/build-file-task-source' -import { WorkNodeSource } from '../work-node-source' -import { BuildFileTask, BuildFileTaskGenerate } from '../../parser/build-file-task' -import { WorkNodeCommand } from '../work-node-command' +import { WorkNode } from '../work-node' +import { WorkContext } from '../work-context' +import { assignDependencies } from './assign-dependencies' import { getWorkNodeId } from '../work-node-id' -import { WorkNodePort } from '../work-node-port' -import { parseWorkNodePort } from './parse-work-node-port' -import { BaseWorkService, WorkService } from '../work-service' -import { getWorkServiceId } from '../work-service-id' -import { BuildFileTaskPlatform } from '../../parser/build-file-task-platform' -import { BuildTaskCommand } from '../../parser/build-file-task-command' -import { CacheMethod } from '../../parser/cache-method' -import { LabelValues } from '../../executer/label-values' -import { homedir, platform } from 'os' -import { parseWorkServiceVolume } from './parse-work-service-volume' -import { WorkMount } from '../work-mount' -import { getContainerVolumes } from './plan-work-volume' -import { getContainerMounts } from './get-container-mounts' -import { normalizePath } from './normalize-path' +import { BuildFileNameSelector, findBuildTask } from './find-build-value' +import { planTask } from './plan-task' +import { parseWorkNode } from './parse-work-node' -export interface BuildFileReference { - build: BuildFile - name: 
string - context: WorkContext -} - -export interface PlannedTask { - build: BuildFile - buildTask: BuildFileTask - name: string - cwd: string - continuous: boolean - deps: BuildFileReference[] - src: BuildFileTaskSource[] - platform: BuildFileTaskPlatform | null - description: string | null - shell: string | null - generates: WorkNodeGenerate[] - image: string | null - mounts: string[] - cmds: BuildTaskCommand[] - needs: BuildFileReference[] - envs: { [key: string]: string } - ports: string[] - labels: { [key: string]: string } - cache: CacheMethod | null -} - -export function planTask(workContext: WorkContext, buildTaskResult: BuildTaskResult): PlannedTask { - let extendedTask: BuildTaskResult | null = null - - if (buildTaskResult.task.extend) { - extendedTask = findBuildTask(workContext, { taskName: buildTaskResult.task.extend }) - if (extendedTask.task.extend) { - throw new Error(`nested extend ${extendedTask.name} is not allowed for task ${buildTaskResult.name}`) - } - } - - const envs = { - ...(extendedTask?.task?.envs || {}), - ...buildTaskResult.context.build.envs, - ...(buildTaskResult.task.envs || {}), - } - - const getReferences = (task: BuildTaskResult | null, prop: 'needs' | 'deps'): BuildFileReference[] => { - if (!task) { - return [] - } - - const value = task.task[prop] - if (!value) { - return [] - } - - return value.map((d) => ({ - name: d, - build: buildTaskResult.context.build, - context: task.context, - })) - } - - const mergeReferences = (first: BuildFileReference[], second: BuildFileReference[]): BuildFileReference[] => { - return [...first, ...second] - } - - return { - buildTask: buildTaskResult.task, - build: buildTaskResult.context.build, - name: buildTaskResult.name, - cache: buildTaskResult.task.cache ?? extendedTask?.task?.cache ?? null, - description: buildTaskResult.task.description ?? extendedTask?.task?.description ?? null, - cwd: workContext.cwd, - image: buildTaskResult.task.image ?? extendedTask?.task?.image ?? 
null, - platform: buildTaskResult.task.platform ?? extendedTask?.task?.platform ?? null, - mounts: buildTaskResult.task.mounts || extendedTask?.task?.mounts || [], - generates: (buildTaskResult.task.generates || extendedTask?.task?.generates || []).map((g) => mapGenerate(g)), - shell: buildTaskResult.task.shell ?? extendedTask?.task?.shell ?? null, - ports: buildTaskResult.task.ports || extendedTask?.task?.ports || [], - src: buildTaskResult.task.src || extendedTask?.task?.src || [], - cmds: buildTaskResult.task.cmds || extendedTask?.task?.cmds || [], - continuous: buildTaskResult.task.continuous ?? extendedTask?.task?.continuous ?? false, - labels: { - ...(extendedTask?.task?.labels || {}), - ...(buildTaskResult.task.labels || {}), - }, - envs, - deps: mergeReferences(getReferences(extendedTask, 'deps'), getReferences(buildTaskResult, 'deps')), - needs: mergeReferences(getReferences(extendedTask, 'needs'), getReferences(buildTaskResult, 'needs')), - } -} - -function mapGenerate(generate: string | BuildFileTaskGenerate): WorkNodeGenerate { - if (typeof generate === 'string') { - return { path: generate, resetOnChange: false, export: false, inherited: false, isFile: false } - } else { - return { - path: generate.path, - resetOnChange: generate.resetOnChange ?? false, - export: generate.export ?? 
false, - inherited: false, - isFile: false, - } - } -} - -export type BuildTaskResult = { task: BuildFileTask; name: string; context: WorkContext } -export type BuildTaskSelector = { taskName: string } - -export function findBuildTask(context: WorkContext, selector: BuildTaskSelector): BuildTaskResult { - if (context.build.tasks[selector.taskName]) { - return { task: context.build.tasks[selector.taskName], context, name: selector.taskName } - } else { - const ref = splitName(selector.taskName) - if (ref.prefix) { - if (context.build.references[ref.prefix]) { - return findBuildTask( - createSubWorkContext(context, { - name: ref.prefix, - type: 'references', - }), - { taskName: ref.taskName } - ) - } else if (context.build.includes[ref.prefix]) { - return findBuildTask( - createSubWorkContext(context, { - name: ref.prefix, - type: 'includes', - }), - { - taskName: ref.taskName, - } - ) - } - } - - throw new Error(`unable to find ${selector.taskName} in ${context.build.path}`) - } -} - -export function getWorkNode(context: WorkContext, selector: BuildTaskSelector): WorkNode { +export function getWorkNode(context: WorkContext, selector: BuildFileNameSelector): WorkNode { const rootNode = findBuildTask(context, selector) const plannedTask = planTask(rootNode.context, rootNode) @@ -178,214 +18,7 @@ export function getWorkNode(context: WorkContext, selector: BuildTaskSelector): const node = parseWorkNode(id, plannedTask, rootNode.context) context.workTree.nodes[id] = node - const depNodes: WorkNode[] = [] - for (const plannedDep of plannedTask.deps) { - const depName = templateValue(plannedDep.name, plannedDep.build.envs) - const depNode = getWorkNode(plannedDep.context, { taskName: depName }) - if (!depNodes.some((d) => d.id === depNode.id)) { - depNodes.push(depNode) - } - } - - planWorkDependency(depNodes, node) + assignDependencies(plannedTask.deps, node) return node } - -export function mapLabels(labels: { [key: string]: string }): LabelValues { - const result: 
LabelValues = {} - for (const [key, value] of Object.entries(labels)) { - result[key] = [value] - } - return result -} - -function parseWorkNode(id: string, task: PlannedTask, context: WorkContext): WorkNode { - const name = [...context.namePrefix, task.name].join(':') - - const generates = parseLocalWorkNodeGenerate(task, context, task.envs) - const baseWorkNode: BaseWorkNode = { - envs: task.envs, - id, - description: templateValue(task.description, task.envs), - continuous: task.continuous, - name, - cwd: task.cwd, - cmds: parseWorkNodeCommand(task, context, task.envs), - deps: [], - buildFile: task.build, - taskName: task.name, - src: parseLocalWorkNodeSource(task, context, task.envs), - generates, - plannedTask: task, - needs: parseWorkNodeNeeds(task.needs, context), - labels: mapLabels(task.labels), - caching: task.cache ?? null, - } - - if (task.image) { - const mounts = getContainerMounts(baseWorkNode, parseContainerWorkNodeMount(task, context, generates, task.envs)) - return { - ...baseWorkNode, - type: 'container', - user: getContainerUser(), - image: templateValue(task.image, task.envs), - shell: templateValue(task.shell, task.envs) || '/bin/sh', - mounts, - ports: parseContainerWorkNodePorts(task, context, task.envs), - volumes: getContainerVolumes(baseWorkNode, mounts), - } - } else { - return { - ...baseWorkNode, - type: 'local', - } - } -} - -export function parseContainerWorkNodePorts( - task: PlannedTask, - context: WorkContext, - envs: { [key: string]: string } | null -): WorkNodePort[] { - return task.ports.map((m) => templateValue(m, envs)).map((m) => parseWorkNodePort(m)) -} - -function parseContainerWorkNodeMount( - task: PlannedTask, - context: WorkContext, - generates: WorkNodeGenerate[], - envs: { [key: string]: string } | null -): WorkMount[] { - const mounts = task.mounts.map((m) => templateValue(m, envs)).map((m) => parseWorkNodeMount(context.cwd, m)) - const fileGenerates = generates - .filter((g) => g.isFile) - .map((g) => ({ - 
localPath: g.path, - containerPath: g.path, - })) - return [...mounts, ...fileGenerates] -} - -function parseLocalWorkNodeGenerate( - task: PlannedTask, - context: WorkContext, - envs: { [key: string]: string } | null -): WorkNodeGenerate[] { - return task.generates.map((g) => { - const filePath = join(context.cwd, templateValue(g.path, envs)) - return { - path: filePath, - resetOnChange: g.resetOnChange, - export: g.export, - isFile: extname(g.path).length > 1, - inherited: g.inherited, - } - }) -} - -function parseLocalWorkNodeSource( - task: PlannedTask, - context: WorkContext, - envs: { [key: string]: string } | null -): WorkNodeSource[] { - return task.src - .map((src) => ({ - relativePath: templateValue(src.relativePath, envs), - matcher: src.matcher, - })) - .map((src) => mapSource(src, context.cwd)) -} - -function getContainerUser(): string | null { - return platform() === 'linux' || platform() === 'freebsd' || platform() === 'openbsd' || platform() === 'sunos' - ? `${process.getuid()}:${process.getgid()}` - : null -} - -export function parseWorkNodeNeeds(needs: BuildFileReference[], context: WorkContext): WorkService[] { - const result: WorkService[] = [] - - for (const need of needs) { - const service = need.build.services[need.name] - const id = getWorkServiceId(need.build, service) - if (!context.workTree.services[id]) { - const workService: BaseWorkService = { - id, - buildService: service, - name: need.name, - description: service.description, - ports: (service.ports || []).map((m) => templateValue(m, service.envs)).map((m) => parseWorkNodePort(m)), - } - if (service.image) { - context.workTree.services[id] = { - ...workService, - type: 'container-service', - cmd: service.cmd, - envs: service.envs || {}, - image: service.image, - // user: getContainerUser(), - healthcheck: service.healthcheck, - mounts: (service.mounts || []) - .map((m) => templateValue(m, service.envs)) - .map((m) => parseWorkNodeMount(need.build.path, m)), - volumes: 
(service.volumes || []) - .map((m) => templateValue(m, service.envs)) - .map((m) => parseWorkServiceVolume(need.build.path, m)), - } - } else if (!!service.context && !!service.selector) { - context.workTree.services[id] = { - ...workService, - context: service.context, - selector: service.selector, - kubeconfig: service.kubeconfig ?? getDefaultKubeConfig(), - type: 'kubernetes-service', - } - } - } - if (!result.some((s) => s.id === id)) { - result.push(context.workTree.services[id]) - } - } - - return result -} - -function getDefaultKubeConfig(): string { - return join(homedir(), '.kube/config') -} - -function parseWorkNodeCommand( - task: PlannedTask, - context: WorkContext, - envs: { [key: string]: string } | null -): WorkNodeCommand[] { - return planWorkCommand( - task.cmds.map((cmd) => { - if (typeof cmd === 'string') { - return templateValue(cmd, envs) - } else { - return { - cmd: templateValue(cmd.cmd, envs), - path: templateValue(cmd.path, envs), - type: cmd.type, - } - } - }), - context.cwd - ) -} - -export function mapSource(src: BuildFileTaskSource, workDir: string): WorkNodeSource { - return { - matcher: src.matcher, - absolutePath: join(workDir, src.relativePath), - } -} - -export function getAbsolutePaths(dirs: string[] | null, workingDir: string): string[] { - if (!dirs) { - return [] - } - return dirs.map((dir) => join(workingDir, dir)) -} diff --git a/src/planner/utils/plan-work-nodes.ts b/src/planner/utils/plan-work-nodes.ts index a1752ceb..d05a16ff 100644 --- a/src/planner/utils/plan-work-nodes.ts +++ b/src/planner/utils/plan-work-nodes.ts @@ -90,7 +90,7 @@ function addWorkNodes(context: WorkContext, files: string[]) { files.push(context.build.fileName) for (const taskName of Object.keys(context.build.tasks)) { - getWorkNode(context, { taskName }) + getWorkNode(context, { name: taskName }) } for (const name of Object.keys(context.build.references)) { diff --git a/src/planner/utils/plan-work-tree.ts b/src/planner/utils/plan-work-tree.ts index 
af2ce3c4..69fa5fb0 100644 --- a/src/planner/utils/plan-work-tree.ts +++ b/src/planner/utils/plan-work-tree.ts @@ -7,7 +7,7 @@ import { WorkTaskScope } from '../../executer/work-scope' export function planWorkTree(build: BuildFile, options: WorkTaskScope): WorkTree & { rootNode: WorkNode } { const context = createWorkContext(build) - const result = getWorkNode(context, { taskName: options.taskName }) + const result = getWorkNode(context, { name: options.taskName }) return { ...context.workTree, rootNode: result, diff --git a/src/planner/utils/planned-task.ts b/src/planner/utils/planned-task.ts new file mode 100644 index 00000000..9153e443 --- /dev/null +++ b/src/planner/utils/planned-task.ts @@ -0,0 +1,30 @@ +import { BuildFile } from '../../parser/build-file' +import { BuildFileTask } from '../../parser/build-file-task' +import { BuildFileTaskSource } from '../../parser/build-file-task-source' +import { BuildFileTaskPlatform } from '../../parser/build-file-task-platform' +import { WorkNodeGenerate } from '../work-node' +import { BuildTaskCommand } from '../../parser/build-file-task-command' +import { CacheMethod } from '../../parser/cache-method' +import { BuildFileReference } from './build-file-reference' + +export interface PlannedTask { + build: BuildFile + buildTask: BuildFileTask + name: string + cwd: string + continuous: boolean + deps: BuildFileReference[] + src: BuildFileTaskSource[] + platform: BuildFileTaskPlatform | null + description: string | null + shell: string | null + generates: WorkNodeGenerate[] + image: string | null + mounts: string[] + cmds: BuildTaskCommand[] + needs: BuildFileReference[] + envs: { [key: string]: string } + ports: string[] + labels: { [key: string]: string } + cache: CacheMethod | null +} diff --git a/src/planner/utils/split-name.spec.ts b/src/planner/utils/split-name.spec.ts new file mode 100644 index 00000000..e84e8057 --- /dev/null +++ b/src/planner/utils/split-name.spec.ts @@ -0,0 +1,15 @@ +import { splitName } from 
'./split-name' + +describe('split-name', () => { + it('should split test:bar', () => { + const result = splitName('test:bar') + expect(result.name).toEqual('bar') + expect(result.prefix).toEqual('test') + }) + + it('should not split test', () => { + const result = splitName('test') + expect(result.name).toEqual('test') + expect(result.prefix).toEqual(undefined) + }) +}) diff --git a/src/planner/utils/split-name.ts b/src/planner/utils/split-name.ts index e8027e56..d2dee505 100644 --- a/src/planner/utils/split-name.ts +++ b/src/planner/utils/split-name.ts @@ -1,8 +1,8 @@ -export function splitName(name: string): { prefix?: string; taskName: string } { +export function splitName(name: string): { prefix?: string; name: string } { const index = name.indexOf(':') if (index > 0) { - return { prefix: name.substr(0, index), taskName: name.substr(index + 1) } + return { prefix: name.substr(0, index), name: name.substr(index + 1) } } else { - return { taskName: name } + return { name } } } diff --git a/src/planner/utils/template-value.spec.ts b/src/planner/utils/template-value.spec.ts new file mode 100644 index 00000000..d3de5398 --- /dev/null +++ b/src/planner/utils/template-value.spec.ts @@ -0,0 +1,13 @@ +import { templateValue } from './template-value' + +describe('template-value', () => { + it('should template string value', () => { + const value = templateValue('echo $VAR', { VAR: 'hello' }) + expect(value).toEqual('echo hello') + }) + + it('should leave missing envs', () => { + const value = templateValue('echo $VAR', {}) + expect(value).toEqual('echo $VAR') + }) +}) diff --git a/src/planner/validate.ts b/src/planner/validate.ts index f6e4e548..3206a49f 100644 --- a/src/planner/validate.ts +++ b/src/planner/validate.ts @@ -2,7 +2,7 @@ import { WorkNode } from './work-node' import { WorkNodeValidation } from './work-node-validation' import { Environment } from '../executer/environment' import { WorkTree } from './work-tree' -import { KubernetesWorkService } from 
'./work-service' +import { KubernetesWorkService, WorkService } from './work-service' import { read } from '../parser/read-build-file' export async function* validate(workTree: WorkTree, context: Environment): AsyncGenerator { @@ -79,7 +79,7 @@ export async function* validate(workTree: WorkTree, context: Environment): Async } if (cycleNodes.indexOf(node) === -1) { - const cyclePath = hasCycle(node, []) + const cyclePath = hasDependencyCycle(node, []) if (cyclePath) { cycleNodes.push(...cyclePath) yield { type: 'error', message: `task cycle detected ${cyclePath.map((n) => n.name).join(' -> ')}`, node: node } @@ -88,13 +88,28 @@ export async function* validate(workTree: WorkTree, context: Environment): Async } } -export function hasCycle(node: WorkNode, currentPath: WorkNode[]): WorkNode[] | null { +export function hasNeedCycle(node: WorkService, currentPath: WorkService[]): WorkService[] | null { + if (currentPath.indexOf(node) >= 0) { + return [...currentPath, node] + } + + for (const dep of node.needs) { + const depHasCycle = hasNeedCycle(dep, [...currentPath, node]) + if (depHasCycle) { + return depHasCycle + } + } + + return null +} + +export function hasDependencyCycle(node: WorkNode, currentPath: WorkNode[]): WorkNode[] | null { if (currentPath.indexOf(node) >= 0) { return [...currentPath, node] } for (const dep of node.deps) { - const depHasCycle = hasCycle(dep, [...currentPath, node]) + const depHasCycle = hasDependencyCycle(dep, [...currentPath, node]) if (depHasCycle) { return depHasCycle } diff --git a/src/planner/work-node-id.spec.ts b/src/planner/work-node-id.spec.ts index 85964afb..14296360 100644 --- a/src/planner/work-node-id.spec.ts +++ b/src/planner/work-node-id.spec.ts @@ -1,8 +1,9 @@ import { getWorkNodeId } from './work-node-id' import { createBuildFile } from '../testing/create-build-file' -import { findBuildTask, planTask } from './utils/plan-work-node' import { createWorkContext } from './work-context' import { environmentMock } from 
'../executer/environment-mock' +import { findBuildTask } from './utils/find-build-value' +import { planTask } from './utils/plan-task' async function compareTasks(firstTask: any, secondTask: any, expectEqual: boolean) { const environment = environmentMock(process.cwd()) @@ -17,9 +18,9 @@ async function compareTasks(firstTask: any, secondTask: any, expectEqual: boolea }, }) const firstContext = createWorkContext(firstBuildFile) - const firstMerged = planTask(firstContext, findBuildTask(firstContext, { taskName: 'test' })) + const firstMerged = planTask(firstContext, findBuildTask(firstContext, { name: 'test' })) const secondContext = createWorkContext(secondBuildFile) - const secondMerged = planTask(secondContext, findBuildTask(secondContext, { taskName: 'test' })) + const secondMerged = planTask(secondContext, findBuildTask(secondContext, { name: 'test' })) const firstNodeId = getWorkNodeId(firstMerged) const secondNodeId = getWorkNodeId(secondMerged) if (expectEqual) { diff --git a/src/planner/work-node-id.ts b/src/planner/work-node-id.ts index 7c010b5d..34826e65 100644 --- a/src/planner/work-node-id.ts +++ b/src/planner/work-node-id.ts @@ -1,6 +1,6 @@ import { createHash } from 'crypto' -import { PlannedTask } from './utils/plan-work-node' import { getWorkNodeCacheDescription } from '../optimizer/work-node-cache-description' +import { PlannedTask } from './utils/planned-task' export function getWorkNodeId(task: PlannedTask): string { const description = getWorkNodeCacheDescription(task) diff --git a/src/planner/work-node.ts b/src/planner/work-node.ts index ad7157fc..d24a7684 100644 --- a/src/planner/work-node.ts +++ b/src/planner/work-node.ts @@ -1,13 +1,13 @@ import { BuildFile } from '../parser/build-file' import { WorkNodeSource } from './work-node-source' import { WorkNodeCommand } from './work-node-command' -import { WorkNodePort } from './work-node-port' +import { WorkPort } from './work-port' import { WorkService } from './work-service' import { 
CacheMethod } from '../parser/cache-method' -import { PlannedTask } from './utils/plan-work-node' import { LabelValues } from '../executer/label-values' import { WorkMount } from './work-mount' import { WorkVolume } from './work-volume' +import { PlannedTask } from './utils/planned-task' export type WorkNode = LocalWorkNode | ContainerWorkNode @@ -48,7 +48,7 @@ export interface ContainerWorkNode extends BaseWorkNode { shell: string user: string | null mounts: WorkMount[] - ports: WorkNodePort[] + ports: WorkPort[] volumes: WorkVolume[] } diff --git a/src/planner/work-node-port.ts b/src/planner/work-port.ts similarity index 58% rename from src/planner/work-node-port.ts rename to src/planner/work-port.ts index d5283f16..9a7a3882 100644 --- a/src/planner/work-node-port.ts +++ b/src/planner/work-port.ts @@ -1,4 +1,4 @@ -export interface WorkNodePort { +export interface WorkPort { hostPort: number containerPort: number } diff --git a/src/planner/work-service.ts b/src/planner/work-service.ts index f6ecc7b7..bd2846df 100644 --- a/src/planner/work-service.ts +++ b/src/planner/work-service.ts @@ -1,4 +1,4 @@ -import { WorkNodePort } from './work-node-port' +import { WorkPort } from './work-port' import { ExecutionBuildService, ExecutionBuildServiceHealthCheck, @@ -12,7 +12,9 @@ export interface BaseWorkService { id: string name: string description: string | null - ports: WorkNodePort[] + ports: WorkPort[] + needs: WorkService[] + deps: WorkNode[] buildService: ExecutionBuildService } @@ -26,6 +28,7 @@ export interface ContainerWorkService extends BaseWorkService { envs: { [key: string]: string } image: string cmd: string | null + cwd: string | null //user: string | null mounts: WorkMount[] volumes: WorkVolume[] diff --git a/src/testing/integration/invalid_loop.spec.ts b/src/testing/integration/invalid_loop.spec.ts index 9c8bd0ed..790ed065 100644 --- a/src/testing/integration/invalid_loop.spec.ts +++ b/src/testing/integration/invalid_loop.spec.ts @@ -10,4 +10,10 @@ 
describe('invalid', () => { const result = await cli.exec() expect(result.success).toBeFalsy() }) + + it('should detect loop in services', async () => { + const { cli } = await suite.setup({ taskName: 'loopservice' }) + const result = await cli.exec() + expect(result.success).toBeFalsy() + }) }) diff --git a/src/testing/integration/kubernetes.spec.ts b/src/testing/integration/kubernetes.spec.ts index 690ed5e8..179fdf00 100644 --- a/src/testing/integration/kubernetes.spec.ts +++ b/src/testing/integration/kubernetes.spec.ts @@ -6,6 +6,10 @@ describe('kubernetes', () => { afterAll(() => suite.close()) + it('should', () => { + expect(true).toBeTrue() + }) + // CI setup missing xit('should forward deployment', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) diff --git a/src/testing/integration/reference.spec.ts b/src/testing/integration/reference.spec.ts index d012a11b..7b1a1a34 100644 --- a/src/testing/integration/reference.spec.ts +++ b/src/testing/integration/reference.spec.ts @@ -18,6 +18,6 @@ describe('reference', () => { it('should list task with references tasks nested', async () => { const { cli } = await suite.setup(emptyWorkLabelScope()) const workNodes = cli.ls() - expect(workNodes.map((t) => t.item.name)).toEqual(['example', 'foo:bar', 'foo:sub:sub']) + expect(workNodes.map((t) => t.item.name)).toEqual(['foo:bardb', 'example', 'foo:bar', 'foo:sub:sub']) }) }) diff --git a/src/testing/integration/services.spec.ts b/src/testing/integration/services.spec.ts index 81375675..a1468984 100644 --- a/src/testing/integration/services.spec.ts +++ b/src/testing/integration/services.spec.ts @@ -4,6 +4,7 @@ import { expectSuccessfulResult } from '../expect' describe('services', () => { const suite = getTestSuite('services', [ 'index.js', + 'server.js', 'config.json', 'package.json', 'package-lock.json', @@ -12,9 +13,15 @@ describe('services', () => { afterAll(() => suite.close()) - it('should run service', async () => { + it('should run 
with needed service', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) const result = await cli.exec() await expectSuccessfulResult(result, environment) }, 120000) + + it('should start needs of service', async () => { + const { cli, environment } = await suite.setup({ taskName: 'test' }) + const result = await cli.exec() + await expectSuccessfulResult(result, environment) + }, 120000) }) From e3e011ddbcbcb21c6f2b60c0d10d9b5df304a904 Mon Sep 17 00:00:00 2001 From: Pascal Bertschi Date: Sun, 8 Jan 2023 21:14:02 +0100 Subject: [PATCH 3/3] feat: up/down of services --- examples/services/.hammerkit.yaml | 5 +- src/cli.ts | 207 ++++++++------- src/executer/are-all-services-down.ts | 11 + src/executer/are-all-services-running.ts | 11 + src/executer/create-scheduler-state.ts | 2 + src/executer/docker-node.ts | 11 +- src/executer/docker-service.ts | 30 ++- src/executer/ensure-needs.ts | 63 +++++ src/executer/event-cache.ts | 14 +- src/executer/execute-docker.ts | 22 +- src/executer/execute.spec.ts | 4 +- src/executer/get-scheduler-down-result.ts | 10 + src/executer/get-scheduler-execute-result.ts | 10 + src/executer/get-scheduler-up-result.ts | 10 + src/executer/get-service-containers.ts | 17 ++ src/executer/has-ended-service.ts | 12 + src/executer/has-service-an-active-need.ts | 31 +++ src/executer/kubernetes-service.ts | 1 + src/executer/label-values.ts | 25 +- src/executer/schedule-down.ts | 19 ++ src/executer/schedule-execution.ts | 29 +++ src/executer/schedule-nodes.ts | 71 +++++ src/executer/schedule-pending-services.ts | 45 ++++ src/executer/schedule-ready-services.ts | 40 +++ src/executer/schedule-up.ts | 29 +++ src/executer/schedule.ts | 243 ------------------ src/executer/scheduler/scheduler-state.ts | 1 + src/executer/scheduler/service-state.ts | 1 + src/executer/unschedule-services.ts | 58 +++++ src/executer/work-scope.ts | 7 +- src/get-work-context.ts | 1 + src/planner/utils/parse-work-node-needs.ts | 2 + 
src/planner/utils/plan-work-nodes.ts | 36 ++- src/planner/work-service.ts | 2 + src/program.ts | 81 +++++- src/service/update-service-status.ts | 32 +++ src/testing/integration/cache.spec.ts | 2 +- src/testing/integration/cancellation.spec.ts | 2 +- src/testing/integration/clean.spec.ts | 8 +- src/testing/integration/cmd.spec.ts | 2 +- src/testing/integration/concurrency.spec.ts | 2 +- src/testing/integration/docker.spec.ts | 2 +- src/testing/integration/env.spec.ts | 10 +- src/testing/integration/error.spec.ts | 4 +- src/testing/integration/export.spec.ts | 4 +- src/testing/integration/glob.spec.ts | 4 +- src/testing/integration/include.spec.ts | 4 +- src/testing/integration/invalid.spec.ts | 2 +- src/testing/integration/invalid_loop.spec.ts | 4 +- src/testing/integration/kubernetes.spec.ts | 2 +- src/testing/integration/labels.spec.ts | 5 + src/testing/integration/local.spec.ts | 2 +- src/testing/integration/monorepo.spec.ts | 2 +- src/testing/integration/reference.spec.ts | 4 +- src/testing/integration/services.spec.ts | 16 +- src/testing/integration/store.spec.ts | 8 +- .../integration/watch-dependency.spec.ts | 2 +- src/testing/integration/watch.spec.ts | 4 +- 58 files changed, 872 insertions(+), 416 deletions(-) create mode 100644 src/executer/are-all-services-down.ts create mode 100644 src/executer/are-all-services-running.ts create mode 100644 src/executer/ensure-needs.ts create mode 100644 src/executer/get-scheduler-down-result.ts create mode 100644 src/executer/get-scheduler-execute-result.ts create mode 100644 src/executer/get-scheduler-up-result.ts create mode 100644 src/executer/get-service-containers.ts create mode 100644 src/executer/has-ended-service.ts create mode 100644 src/executer/has-service-an-active-need.ts create mode 100644 src/executer/schedule-down.ts create mode 100644 src/executer/schedule-execution.ts create mode 100644 src/executer/schedule-nodes.ts create mode 100644 src/executer/schedule-pending-services.ts create mode 100644 
src/executer/schedule-ready-services.ts create mode 100644 src/executer/schedule-up.ts delete mode 100644 src/executer/schedule.ts create mode 100644 src/executer/unschedule-services.ts create mode 100644 src/service/update-service-status.ts diff --git a/examples/services/.hammerkit.yaml b/examples/services/.hammerkit.yaml index 0296d3af..089725f4 100644 --- a/examples/services/.hammerkit.yaml +++ b/examples/services/.hammerkit.yaml @@ -3,9 +3,10 @@ services: image: node:16.6.0-alpine deps: [install] needs: [postgres] + ports: + - 3000 labels: - stage: run - app: example + task: dev mounts: - server.js - config.json diff --git a/src/cli.ts b/src/cli.ts index bc11c453..250b7efe 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -7,7 +7,7 @@ import { iterateWorkNodes, iterateWorkServices } from './planner/utils/plan-work import { createSchedulerState } from './executer/create-scheduler-state' import { isCI } from './utils/ci' import { getLogger } from './console/get-logger' -import { schedule } from './executer/schedule' +import { scheduleExecution } from './executer/schedule-execution' import { cleanCache, restoreCache, storeCache } from './executer/event-cache' import { validate } from './planner/validate' import { WorkTree } from './planner/work-tree' @@ -17,10 +17,16 @@ import { ReadonlyState } from './executer/readonly-state' import { ProcessManager } from './executer/process-manager' import { WorkService } from './planner/work-service' import { removeContainer } from './docker/remove-container' +import { updateServiceStatus } from './service/update-service-status' +import { WorkLabelScope } from './executer/work-scope' +import { scheduleUp } from './executer/schedule-up' +import { scheduleDown } from './executer/schedule-down' +import { State } from './executer/state' export interface CliExecOptions { workers: number watch: boolean + daemon: boolean logMode: LogMode cacheDefault: CacheMethod } @@ -47,103 +53,126 @@ export const isCliTask = (val: CliItem): val is 
CliTaskItem => val.type === 'tas export const isCliService = (val: CliItem): val is CliServiceItem => val.type === 'service' export type CliItem = CliTaskItem | CliServiceItem -export interface Cli { - exec(options?: Partial): Promise - execWatch(options?: Partial): CliExecResult +export class Cli { + constructor(private workTree: WorkTree, private environment: Environment) {} - store(path: string): Promise + async setup( + scheduler: (process: ProcessManager, state: State, environment: Environment) => Promise, + options?: Partial + ): Promise { + const processManager = new ProcessManager(this.environment, options?.workers ?? 0) + const logMode: LogMode = options?.logMode ?? (isCI ? 'live' : 'interactive') + const state = createSchedulerState({ + daemon: options?.daemon ?? false, + services: this.workTree.services, + nodes: this.workTree.nodes, + watch: options?.watch ?? false, + logMode, + cacheMethod: options?.cacheDefault ?? 'checksum', + }) - restore(path: string): Promise + await updateServiceStatus(state, this.environment) - clean(options?: Partial): Promise + const logger = getLogger(logMode, state, this.environment) - validate(): AsyncGenerator - - ls(): CliItem[] - - shutdown(): Promise - - node(name: string): WorkNode -} + return { + state, + processManager, + start: async () => { + const result = await scheduler(processManager, state, this.environment) + await logger.complete(result, this.environment) + return result + }, + } + } -export function getCli(workTree: WorkTree, environment: Environment): Cli { - return { - execWatch(options?: Partial): CliExecResult { - const processManager = new ProcessManager(environment, options?.workers ?? 0) - const state = createSchedulerState({ - services: workTree.services, - nodes: workTree.nodes, - watch: options?.watch ?? false, - logMode: options?.logMode ?? isCI ? 'live' : 'interactive', - cacheMethod: options?.cacheDefault ?? 'checksum', - }) - const logMode: LogMode = options?.logMode ?? (isCI ? 
'live' : 'interactive') - const logger = getLogger(logMode, state, environment) - - return { - state, - processManager, - start: async () => { - const result = await schedule(processManager, state, environment) - await logger.complete(result, environment) - return result + async shutdown(): Promise { + for (const node of iterateWorkNodes(this.workTree.nodes)) { + const containers = await this.environment.docker.listContainers({ + all: true, + filters: { + label: [`hammerkit-id=${node.id}`], }, + }) + for (const container of containers) { + await removeContainer(this.environment.docker.getContainer(container.Id)) } - }, - async shutdown(): Promise { - for (const node of iterateWorkNodes(workTree.nodes)) { - const containers = await environment.docker.listContainers({ - all: true, - filters: { - label: [`hammerkit-id=${node.id}`], - }, - }) - for (const container of containers) { - await removeContainer(environment.docker.getContainer(container.Id)) - } - } + } - for (const service of iterateWorkServices(workTree.services)) { - const containers = await environment.docker.listContainers({ - all: true, - filters: { - label: [`hammerkit-id=${service.id}`], - }, - }) - for (const container of containers) { - await removeContainer(environment.docker.getContainer(container.Id)) - } - } - }, - async exec(options?: Partial): Promise { - return this.execWatch(options).start() - }, - async clean(options?: Partial): Promise { - await cleanCache(workTree, environment, { - service: options?.service ?? 
false, + for (const service of iterateWorkServices(this.workTree.services)) { + const containers = await this.environment.docker.listContainers({ + all: true, + filters: { + label: [`hammerkit-id=${service.id}`], + }, }) - }, - async restore(path: string): Promise { - await restoreCache(path, workTree, environment) - }, - async store(path: string): Promise { - await storeCache(path, workTree, environment) - }, - ls(): CliItem[] { - return [ - ...Array.from(iterateWorkServices(workTree.services)).map((item) => ({ item, type: 'service' })), - ...Array.from(iterateWorkNodes(workTree.nodes)).map((item) => ({ item, type: 'task' })), - ] - }, - validate(): AsyncGenerator { - return validate(workTree, environment) - }, - node(name: string): WorkNode { - const node = Object.values(workTree.nodes).find((n) => n.name == name) - if (!node) { - throw new Error(`unable to find node ${name}`) + for (const container of containers) { + await removeContainer(this.environment.docker.getContainer(container.Id)) } - return node - }, + } + } + + async up(options?: Partial): Promise { + return await this.setup(scheduleUp, options) + } + + async runUp(options?: Partial): Promise { + const run = await this.up(options) + return await run.start() + } + + async down(): Promise { + return await this.setup(scheduleDown, {}) + } + + async runDown(): Promise { + const run = await this.down() + return await run.start() } + + async exec(options?: Partial): Promise { + return await this.setup(scheduleExecution, options) + } + + async runExec(options?: Partial): Promise { + const run = await this.exec(options) + return await run.start() + } + + async clean(options?: Partial): Promise { + await cleanCache(this.workTree, this.environment, { + service: options?.service ?? 
false, + }) + } + + async restore(path: string): Promise { + await restoreCache(path, this.workTree, this.environment) + } + + async store(path: string): Promise { + await storeCache(path, this.workTree, this.environment) + } + + ls(): CliItem[] { + return [ + ...Array.from(iterateWorkServices(this.workTree.services)).map((item) => ({ item, type: 'service' })), + ...Array.from(iterateWorkNodes(this.workTree.nodes)).map((item) => ({ item, type: 'task' })), + ] + } + + validate(): AsyncGenerator { + return validate(this.workTree, this.environment) + } + + node(name: string): WorkNode { + const node = Object.values(this.workTree.nodes).find((n) => n.name == name) + if (!node) { + throw new Error(`unable to find node ${name}`) + } + return node + } +} + +export function getCli(workTree: WorkTree, environment: Environment): Cli { + return new Cli(workTree, environment) } diff --git a/src/executer/are-all-services-down.ts b/src/executer/are-all-services-down.ts new file mode 100644 index 00000000..b84629ba --- /dev/null +++ b/src/executer/are-all-services-down.ts @@ -0,0 +1,11 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { iterateWorkServices } from '../planner/utils/plan-work-nodes' + +export function areAllServicesDown(state: SchedulerState): boolean { + for (const service of iterateWorkServices(state.service)) { + if (service.type === 'running' || service.type === 'starting' || service.type === 'canceled') { + return false + } + } + return true +} diff --git a/src/executer/are-all-services-running.ts b/src/executer/are-all-services-running.ts new file mode 100644 index 00000000..48fd4903 --- /dev/null +++ b/src/executer/are-all-services-running.ts @@ -0,0 +1,11 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { iterateWorkServices } from '../planner/utils/plan-work-nodes' + +export function areAllServicesRunning(state: SchedulerState): boolean { + for (const service of iterateWorkServices(state.service)) { + if 
(service.type !== 'running') { + return false + } + } + return true +} diff --git a/src/executer/create-scheduler-state.ts b/src/executer/create-scheduler-state.ts index 3d3226cb..df06b32d 100644 --- a/src/executer/create-scheduler-state.ts +++ b/src/executer/create-scheduler-state.ts @@ -11,6 +11,7 @@ export interface CreateSchedulerState { nodes: WorkNodes services: WorkServices watch: boolean + daemon: boolean logMode: LogMode cacheMethod: CacheMethod } @@ -21,6 +22,7 @@ export function createSchedulerState(input: CreateSchedulerState): State { node: {}, cacheMethod: input.cacheMethod, watch: input.watch, + daemon: input.daemon, } for (const node of iterateWorkNodes(input.nodes)) { diff --git a/src/executer/docker-node.ts b/src/executer/docker-node.ts index 53f2684e..532a9c97 100644 --- a/src/executer/docker-node.ts +++ b/src/executer/docker-node.ts @@ -36,6 +36,7 @@ export function getNeedsNetwork(serviceContainers: { [key: string]: ServiceDns } function buildCreateOptions( node: ContainerWorkNode, + stateKey: string, serviceContainers: { [key: string]: ServiceDns } ): ContainerCreateOptions { const network = getNeedsNetwork(serviceContainers, node.needs) @@ -47,7 +48,13 @@ function buildCreateOptions( Cmd: ['-c', 'sleep 3600'], Env: Object.keys(node.envs).map((k) => `${k}=${node.envs[k]}`), WorkingDir: convertToPosixPath(node.cwd), - Labels: { app: 'hammerkit', 'hammerkit-id': node.id, 'hammerkit-type': 'task' }, + Labels: { + app: 'hammerkit', + 'hammerkit-id': node.id, + 'hammerkit-pid': process.pid.toString(), + 'hammerkit-type': 'task', + 'hammerkit-state': stateKey, + }, HostConfig: { Binds: [ ...node.mounts.map((v) => `${v.localPath}:${convertToPosixPath(v.containerPath)}`), @@ -89,7 +96,7 @@ export function dockerNode( await prepareVolume(node, environment) checkForAbort(abort.signal) - const containerOptions = buildCreateOptions(node, serviceContainers) + const containerOptions = buildCreateOptions(node, stateKey, serviceContainers) 
printContainerOptions(status, containerOptions) const success = await usingContainer(environment, node, containerOptions, async (container) => { diff --git a/src/executer/docker-service.ts b/src/executer/docker-service.ts index 25468b9f..113e0250 100644 --- a/src/executer/docker-service.ts +++ b/src/executer/docker-service.ts @@ -43,7 +43,13 @@ export function dockerService( container = await environment.docker.createContainer({ Image: service.image, Env: Object.keys(service.envs).map((k) => `${k}=${service.envs[k]}`), - Labels: { app: 'hammerkit', 'hammerkit-id': service.id, 'hammerkit-type': 'service' }, + Labels: { + app: 'hammerkit', + 'hammerkit-id': service.id, + 'hammerkit-pid': process.pid.toString(), + 'hammerkit-type': 'service', + 'hammerkit-state': stateKey, + }, ExposedPorts: service.ports.reduce<{ [key: string]: Record }>((map, port) => { map[`${port.containerPort}/tcp`] = {} return map @@ -69,14 +75,13 @@ export function dockerService( await container.start() - //await setUserPermissions(service, container, environment) - if (!service.healthcheck) { state.patchService({ type: 'running', service, dns: { containerId: container.id }, stateKey, + remote: null, }) } else { let ready = false @@ -93,18 +98,21 @@ export function dockerService( service, dns: { containerId: container.id }, stateKey, + remote: null, }) } } - await waitOnAbort(abort.signal) + if (!state.current.daemon) { + await waitOnAbort(abort.signal) - state.patchService({ - type: 'end', - service, - stateKey, - reason: 'terminated', - }) + state.patchService({ + type: 'end', + service, + stateKey, + reason: 'terminated', + }) + } } catch (e) { if (e instanceof AbortError) { state.patchService({ @@ -122,7 +130,7 @@ export function dockerService( }) } } finally { - if (container) { + if (container && !state.current.daemon) { try { await removeContainer(container) } catch (e) { diff --git a/src/executer/ensure-needs.ts b/src/executer/ensure-needs.ts new file mode 100644 index 
00000000..32fdd2a3 --- /dev/null +++ b/src/executer/ensure-needs.ts @@ -0,0 +1,63 @@ +import { NodeState } from './scheduler/node-state' +import { isServiceState, ServiceState } from './scheduler/service-state' +import { WorkService } from '../planner/work-service' +import { ProcessManager } from './process-manager' +import { State } from './state' +import { Environment } from './environment' +import { SchedulerState } from './scheduler/scheduler-state' +import { schedulePendingServices } from './schedule-pending-services' + +export function ensureNeeds( + nodeOrServiceState: NodeState | ServiceState, + needs: WorkService[], + processManager: ProcessManager, + state: State, + environment: Environment, + currentState: SchedulerState +): boolean { + const endedNeeds = needs + .map((need) => currentState.service[need.id]) + .filter((service) => service.type === 'end' || service.type === 'canceled') + + if (endedNeeds.length > 0) { + if (isServiceState(nodeOrServiceState)) { + state.patchService({ + type: 'error', + service: nodeOrServiceState.service, + stateKey: nodeOrServiceState.stateKey, + errorMessage: endedNeeds + .map((n) => `service ${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) + .join(', '), + }) + } else { + state.patchNode( + { + type: 'error', + node: nodeOrServiceState.node, + stateKey: nodeOrServiceState.stateKey, + errorMessage: endedNeeds + .map((n) => `service ${n.service.name} ${n.type === 'end' ? 
'has ended ' + n.reason : 'was canceled'}`) + .join(', '), + }, + nodeOrServiceState.stateKey + ) + } + return false + } + + const pendingNeeds = needs + .map((need) => currentState.service[need.id]) + .filter((service) => service.type === 'pending') + + if (pendingNeeds.length > 0) { + schedulePendingServices(pendingNeeds, state, processManager, environment, currentState) + return false + } + + const hasNotReadyNeeds = needs.some((need) => currentState.service[need.id].type !== 'running') + if (hasNotReadyNeeds) { + return false + } + + return true +} diff --git a/src/executer/event-cache.ts b/src/executer/event-cache.ts index eab7f7f5..add8a8ed 100644 --- a/src/executer/event-cache.ts +++ b/src/executer/event-cache.ts @@ -52,7 +52,12 @@ async function restoreContainer(environment: Environment, node: ContainerWorkNod Entrypoint: 'sh', Cmd: ['-c', 'sleep 3600'], WorkingDir: convertToPosixPath(node.cwd), - Labels: { app: 'hammerkit', 'hammerkit-id': node.id, 'hammerkit-type': 'task' }, + Labels: { + app: 'hammerkit', + 'hammerkit-id': node.id, + 'hammerkit-pid': process.pid.toString(), + 'hammerkit-type': 'task', + }, HostConfig: { AutoRemove: true, Binds: [...node.volumes.map((v) => `${v.name}:${convertToPosixPath(v.containerPath)}`)], @@ -91,7 +96,12 @@ async function archiveContainer(environment: Environment, node: ContainerWorkNod Entrypoint: 'sh', Cmd: ['-c', 'sleep 3600'], WorkingDir: convertToPosixPath(node.cwd), - Labels: { app: 'hammerkit', 'hammerkit-id': node.id, 'hammerkit-type': 'task' }, + Labels: { + app: 'hammerkit', + 'hammerkit-id': node.id, + 'hammerkit-pid': process.pid.toString(), + 'hammerkit-type': 'task', + }, HostConfig: { AutoRemove: true, Binds: [...node.volumes.map((v) => `${v.name}:${convertToPosixPath(v.containerPath)}`)], diff --git a/src/executer/execute-docker.ts b/src/executer/execute-docker.ts index 1d920acc..973a0273 100644 --- a/src/executer/execute-docker.ts +++ b/src/executer/execute-docker.ts @@ -67,6 +67,7 @@ export async 
function execCommand( timeout: number | undefined, abort: AbortSignal | undefined ): Promise { + const abortController = new AbortController() const exec = await container.exec({ Cmd: cmd, WorkingDir: cwd, @@ -75,11 +76,19 @@ export async function execCommand( AttachStdin: true, AttachStderr: true, User: user ?? undefined, + abortSignal: abortController.signal, }) status.write('debug', `received exec id ${exec.id}`) - const stream = await exec.start({ stdin: true, hijack: true, Detach: false, Tty: false }) + const stream = await exec.start({ + stdin: true, + hijack: true, + Detach: false, + Tty: false, + abortSignal: abortController.signal, + }) + let timeoutHandle: NodeJS.Timer | undefined = undefined return new Promise((resolve, reject) => { let resolved = false @@ -91,6 +100,10 @@ export async function execCommand( resolved = true resolve({ type: 'canceled' }) + abortController.abort() + if (timeoutHandle) { + clearTimeout(timeoutHandle) + } }) } @@ -107,18 +120,23 @@ export async function execCommand( resolve({ type: 'result', result }) resolved = true + abortController.abort() + if (timeoutHandle) { + clearTimeout(timeoutHandle) + } }) }) .catch(reject) if (timeout) { - setTimeout(() => { + timeoutHandle = setTimeout(() => { if (resolved) { return } resolve({ type: 'timeout' }) resolved = true + abortController.abort() }, timeout) } }) diff --git a/src/executer/execute.spec.ts b/src/executer/execute.spec.ts index 85752075..edfe4c3d 100644 --- a/src/executer/execute.spec.ts +++ b/src/executer/execute.spec.ts @@ -10,7 +10,7 @@ describe('execute', () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) const node = cli.node('api') - const exec = cli.execWatch({ watch: true }) + const exec = await cli.exec({ watch: true }) let count = 0 exec.state.on((state) => { @@ -33,7 +33,7 @@ describe('execute', () => { const { cli, environment } = await suite.setup({ taskName: 'api_crashing' }) const node = cli.node('api_crashing') - const exec = 
cli.execWatch({ watch: true }) + const exec = await cli.exec({ watch: true }) let count = 0 exec.state.on((state) => { diff --git a/src/executer/get-scheduler-down-result.ts b/src/executer/get-scheduler-down-result.ts new file mode 100644 index 00000000..5a34153b --- /dev/null +++ b/src/executer/get-scheduler-down-result.ts @@ -0,0 +1,10 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { SchedulerResult } from './scheduler/scheduler-result' + +export function getSchedulerDownResult(state: SchedulerState): SchedulerResult { + const success = !Object.values(state.service).some((n) => n.type === 'running' || n.type === 'starting') + return { + state, + success, + } +} diff --git a/src/executer/get-scheduler-execute-result.ts b/src/executer/get-scheduler-execute-result.ts new file mode 100644 index 00000000..bd1abcaa --- /dev/null +++ b/src/executer/get-scheduler-execute-result.ts @@ -0,0 +1,10 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { SchedulerResult } from './scheduler/scheduler-result' + +export function getSchedulerExecuteResult(state: SchedulerState): SchedulerResult { + const success = !Object.values(state.node).some((n) => n.type !== 'completed') + return { + state, + success, + } +} diff --git a/src/executer/get-scheduler-up-result.ts b/src/executer/get-scheduler-up-result.ts new file mode 100644 index 00000000..b872ce15 --- /dev/null +++ b/src/executer/get-scheduler-up-result.ts @@ -0,0 +1,10 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { SchedulerResult } from './scheduler/scheduler-result' + +export function getSchedulerUpResult(state: SchedulerState): SchedulerResult { + const success = !Object.values(state.service).some((n) => n.type !== 'running') + return { + state, + success, + } +} diff --git a/src/executer/get-service-containers.ts b/src/executer/get-service-containers.ts new file mode 100644 index 00000000..d85bea97 --- /dev/null +++ 
b/src/executer/get-service-containers.ts @@ -0,0 +1,17 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { WorkService } from '../planner/work-service' +import { ServiceDns } from './service-dns' + +export function getServiceContainers( + currentState: SchedulerState, + needs: WorkService[] +): { [key: string]: ServiceDns } { + const serviceContainers: { [key: string]: ServiceDns } = {} + for (const need of needs) { + const serviceState = currentState.service[need.id] + if (serviceState.type === 'running') { + serviceContainers[need.id] = serviceState.dns + } + } + return serviceContainers +} diff --git a/src/executer/has-ended-service.ts b/src/executer/has-ended-service.ts new file mode 100644 index 00000000..85db9230 --- /dev/null +++ b/src/executer/has-ended-service.ts @@ -0,0 +1,12 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { iterateWorkServices } from '../planner/utils/plan-work-nodes' + +export function hasEndedService(state: SchedulerState): boolean { + for (const service of iterateWorkServices(state.service)) { + if (service.type === 'end' || service.type === 'error') { + return true + } + } + + return false +} diff --git a/src/executer/has-service-an-active-need.ts b/src/executer/has-service-an-active-need.ts new file mode 100644 index 00000000..a329b823 --- /dev/null +++ b/src/executer/has-service-an-active-need.ts @@ -0,0 +1,31 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { iterateWorkNodes, iterateWorkServices } from '../planner/utils/plan-work-nodes' + +export function hasServiceAnActiveNeed(currentState: SchedulerState, serviceId: string) { + let hasNeed = false + for (const nodeState of iterateWorkNodes(currentState.node)) { + if (nodeState.type === 'running' || nodeState.type === 'starting' || nodeState.type === 'pending') { + if (nodeState.node.needs.some((n) => n.id === serviceId)) { + hasNeed = true + break + } + } + } + + if (!hasNeed) { + for (const serviceState of 
iterateWorkServices(currentState.service)) { + if ( + serviceState.type === 'ready' || + serviceState.type === 'running' || + serviceState.type === 'starting' || + serviceState.type === 'pending' + ) { + if (serviceState.service.needs.some((n) => n.id === serviceId)) { + hasNeed = true + break + } + } + } + } + return hasNeed +} diff --git a/src/executer/kubernetes-service.ts b/src/executer/kubernetes-service.ts index a67dfe76..162fcefd 100644 --- a/src/executer/kubernetes-service.ts +++ b/src/executer/kubernetes-service.ts @@ -49,6 +49,7 @@ function startForward( type: 'running', dns: { host: 'host-gateway' }, stateKey, + remote: null, }) }) ps.stderr?.on('data', async (data) => { diff --git a/src/executer/label-values.ts b/src/executer/label-values.ts index 01b6f291..89fcbc25 100644 --- a/src/executer/label-values.ts +++ b/src/executer/label-values.ts @@ -1,5 +1,7 @@ import { WorkNode } from '../planner/work-node' -import { WorkNodes } from '../planner/work-nodes' +import { WorkTree } from '../planner/work-tree' +import { iterateWorkNodes, iterateWorkServices } from '../planner/utils/plan-work-nodes' +import { WorkService } from '../planner/work-service' export interface LabelValues { [key: string]: string[] @@ -9,18 +11,18 @@ export function hasLabels(labels: LabelValues) { return Object.keys(labels).length > 0 } -export function matchesAllLabels(filterLabels: LabelValues, node: WorkNode, nodes: WorkNodes): boolean { +export function matchesAllLabels(filterLabels: LabelValues, node: WorkNode | WorkService, workTree: WorkTree): boolean { for (const [labelKey, filterValues] of Object.entries(filterLabels)) { const nodeValues = node.labels[labelKey] if (nodeValues === null || nodeValues === undefined || nodeValues.length === 0) { - if (!hasNeedsWithMatch(node, nodes, (depNode) => matchesAllLabels(filterLabels, depNode, nodes))) { + if (!hasNeedsWithMatch(node, workTree, (depNode) => matchesAllLabels(filterLabels, depNode, workTree))) { return false } } for (const 
nodeValue of nodeValues || []) { if (filterValues.indexOf(nodeValue) === -1) { - if (!hasNeedsWithMatch(node, nodes, (depNode) => matchesAllLabels(filterLabels, depNode, nodes))) { + if (!hasNeedsWithMatch(node, workTree, (depNode) => matchesAllLabels(filterLabels, depNode, workTree))) { return false } } @@ -30,14 +32,25 @@ export function matchesAllLabels(filterLabels: LabelValues, node: WorkNode, node return true } -export function hasNeedsWithMatch(root: WorkNode, nodes: WorkNodes, matcher: (node: WorkNode) => boolean): boolean { - for (const node of Object.values(nodes)) { +export function hasNeedsWithMatch( + root: WorkNode | WorkService, + workTree: WorkTree, + matcher: (node: WorkNode | WorkService) => boolean +): boolean { + for (const node of iterateWorkNodes(workTree.nodes)) { if (node.deps.some((d) => d.id === root.id)) { if (matcher(node)) { return true } } } + for (const service of iterateWorkServices(workTree.services)) { + if (service.deps.some((d) => d.id === root.id)) { + if (matcher(service)) { + return true + } + } + } return false } diff --git a/src/executer/schedule-down.ts b/src/executer/schedule-down.ts new file mode 100644 index 00000000..027d39fd --- /dev/null +++ b/src/executer/schedule-down.ts @@ -0,0 +1,19 @@ +import { ProcessManager } from './process-manager' +import { State } from './state' +import { Environment } from './environment' +import { SchedulerResult } from './scheduler/scheduler-result' +import { unscheduleServices } from './unschedule-services' +import { areAllServicesDown } from './are-all-services-down' +import { getSchedulerDownResult } from './get-scheduler-down-result' + +export async function scheduleDown(processManager: ProcessManager, state: State, environment: Environment) { + return new Promise((resolve) => { + state.on((currentState) => { + unscheduleServices(currentState, state, processManager, environment, true) + + if (areAllServicesDown(currentState)) { + resolve(getSchedulerDownResult(currentState)) + } + }) 
+ }) +} diff --git a/src/executer/schedule-execution.ts b/src/executer/schedule-execution.ts new file mode 100644 index 00000000..6e78658c --- /dev/null +++ b/src/executer/schedule-execution.ts @@ -0,0 +1,29 @@ +import { Environment } from './environment' +import { SchedulerResult } from './scheduler/scheduler-result' +import { State } from './state' +import { ProcessManager } from './process-manager' +import { startWatchProcesses } from '../start-watch-processes' +import { scheduleReadyServices } from './schedule-ready-services' +import { scheduleNodes } from './schedule-nodes' +import { unscheduleServices } from './unschedule-services' +import { getSchedulerExecuteResult } from './get-scheduler-execute-result' + +export async function scheduleExecution( + processManager: ProcessManager, + state: State, + environment: Environment +): Promise { + if (state.current.watch) { + startWatchProcesses(state, processManager, environment) + } + + state.on((currentState) => { + scheduleNodes(currentState, state, processManager, environment) + scheduleReadyServices(currentState, state, processManager, environment) + unscheduleServices(currentState, state, processManager, environment, false) + }) + + await processManager.onComplete() + + return getSchedulerExecuteResult(state.current) +} diff --git a/src/executer/schedule-nodes.ts b/src/executer/schedule-nodes.ts new file mode 100644 index 00000000..1ad65da5 --- /dev/null +++ b/src/executer/schedule-nodes.ts @@ -0,0 +1,71 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { State } from './state' +import { ProcessManager } from './process-manager' +import { Environment } from './environment' +import { startNode } from './start-node' +import { ensureNeeds } from './ensure-needs' +import { getServiceContainers } from './get-service-containers' +import { logContext } from '../planner/work-node-status' +import { isContainerWorkNode } from '../planner/work-node' +import { dockerNode } from './docker-node' 
+import { localNode } from './local-node' +import { iterateWorkNodes } from '../planner/utils/plan-work-nodes' + +export function scheduleNodes( + currentState: SchedulerState, + state: State, + processManager: ProcessManager, + environment: Environment +) { + for (const nodeState of iterateWorkNodes(currentState.node)) { + if (nodeState.type === 'pending') { + const hasOpenDeps = nodeState.node.deps.some((dep) => currentState.node[dep.id].type !== 'completed') + if (hasOpenDeps) { + continue + } + + state.patchNode( + { + type: 'starting', + node: nodeState.node, + started: new Date(), + stateKey: null, + }, + nodeState.stateKey + ) + + processManager.background( + { + type: 'task', + name: nodeState.node.name, + id: nodeState.node.id + '-cache', + }, + async (abort) => { + await startNode(nodeState, state, environment, abort.signal) + } + ) + } else if (nodeState.type === 'ready') { + if (!ensureNeeds(nodeState, nodeState.node.needs, processManager, state, environment, currentState)) { + continue + } + + const serviceContainers = getServiceContainers(currentState, nodeState.node.needs) + + state.patchNode( + { + type: 'running', + node: nodeState.node, + stateKey: nodeState.stateKey, + started: nodeState.started, + }, + nodeState.stateKey + ) + const ctx = logContext('task', nodeState.node) + if (isContainerWorkNode(nodeState.node)) { + processManager.task(ctx, dockerNode(nodeState.node, nodeState.stateKey, serviceContainers, state, environment)) + } else { + processManager.task(ctx, localNode(nodeState.node, nodeState.stateKey, state, environment)) + } + } + } +} diff --git a/src/executer/schedule-pending-services.ts b/src/executer/schedule-pending-services.ts new file mode 100644 index 00000000..bac38afd --- /dev/null +++ b/src/executer/schedule-pending-services.ts @@ -0,0 +1,45 @@ +import { ServiceState } from './scheduler/service-state' +import { ProcessManager } from './process-manager' +import { State } from './state' +import { Environment } from 
'./environment' +import { SchedulerState } from './scheduler/scheduler-state' +import { getServiceContainers } from './get-service-containers' +import { startService } from './start-node' +import { ensureNeeds } from './ensure-needs' + +export function schedulePendingServices( + services: ServiceState[], + state: State, + processManager: ProcessManager, + environment: Environment, + currentState: SchedulerState +) { + for (const service of services) { + if (service.type !== 'pending') { + continue + } + + if (!ensureNeeds(service, service.service.needs, processManager, state, environment, currentState)) { + continue + } + + state.patchService({ + type: 'starting', + service: service.service, + stateKey: null, + }) + + const serviceContainers = getServiceContainers(currentState, service.service.needs) + + processManager.background( + { + type: 'service', + name: service.service.name, + id: service.service.id + '-cache', + }, + async (abort) => { + await startService(service.service, state, serviceContainers, environment, abort.signal) + } + ) + } +} diff --git a/src/executer/schedule-ready-services.ts b/src/executer/schedule-ready-services.ts new file mode 100644 index 00000000..dfd091af --- /dev/null +++ b/src/executer/schedule-ready-services.ts @@ -0,0 +1,40 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { State } from './state' +import { ProcessManager } from './process-manager' +import { Environment } from './environment' +import { logContext } from '../planner/work-node-status' +import { getServiceContainers } from './get-service-containers' +import { isContainerWorkService } from '../planner/work-service' +import { dockerService } from './docker-service' +import { kubernetesService } from './kubernetes-service' + +export function scheduleReadyServices( + currentState: SchedulerState, + state: State, + processManager: ProcessManager, + environment: Environment +) { + for (const [_, serviceState] of Object.entries(currentState.service)) 
{ + const hasOpenDeps = serviceState.service.deps.some((dep) => currentState.node[dep.id].type !== 'completed') + if (hasOpenDeps) { + continue + } + + if (serviceState.type === 'ready') { + const ctx = logContext('service', serviceState.service) + state.patchService({ + type: 'starting', + service: serviceState.service, + stateKey: serviceState.stateKey, + }) + + const serviceContainers = getServiceContainers(currentState, serviceState.service.needs) + processManager.background( + ctx, + isContainerWorkService(serviceState.service) + ? dockerService(serviceState.service, serviceState.stateKey, serviceContainers, state, environment) + : kubernetesService(serviceState.service, serviceState.stateKey, state, environment) + ) + } + } +} diff --git a/src/executer/schedule-up.ts b/src/executer/schedule-up.ts new file mode 100644 index 00000000..8a4af6ce --- /dev/null +++ b/src/executer/schedule-up.ts @@ -0,0 +1,29 @@ +import { ProcessManager } from './process-manager' +import { State } from './state' +import { Environment } from './environment' +import { scheduleNodes } from './schedule-nodes' +import { schedulePendingServices } from './schedule-pending-services' +import { scheduleReadyServices } from './schedule-ready-services' +import { startWatchProcesses } from '../start-watch-processes' +import { hasEndedService } from './has-ended-service' +import { getSchedulerUpResult } from './get-scheduler-up-result' + +export async function scheduleUp(processManager: ProcessManager, state: State, environment: Environment) { + if (state.current.watch && !state.current.daemon) { + startWatchProcesses(state, processManager, environment) + } + + state.on((currentState) => { + if (!environment.abortCtrl.signal.aborted && hasEndedService(currentState)) { + environment.abortCtrl.abort() + } + + scheduleNodes(currentState, state, processManager, environment) + schedulePendingServices(Object.values(currentState.service), state, processManager, environment, currentState) + 
scheduleReadyServices(currentState, state, processManager, environment) + }) + + await processManager.onComplete() + + return getSchedulerUpResult(state.current) +} diff --git a/src/executer/schedule.ts b/src/executer/schedule.ts deleted file mode 100644 index 450559c4..00000000 --- a/src/executer/schedule.ts +++ /dev/null @@ -1,243 +0,0 @@ -import { ServiceDns } from './service-dns' -import { Environment } from './environment' -import { SchedulerResult } from './scheduler/scheduler-result' -import { isContainerWorkService, WorkService } from '../planner/work-service' -import { dockerService } from './docker-service' -import { kubernetesService } from './kubernetes-service' -import { isContainerWorkNode } from '../planner/work-node' -import { dockerNode } from './docker-node' -import { localNode } from './local-node' -import { State } from './state' -import { ProcessManager } from './process-manager' -import { logContext } from '../planner/work-node-status' -import { startWatchProcesses } from '../start-watch-processes' -import { startNode, startService } from './start-node' -import { iterateWorkNodes, iterateWorkServices } from '../planner/utils/plan-work-nodes' -import { SchedulerState } from './scheduler/scheduler-state' -import { NodeState } from './scheduler/node-state' -import { isServiceState, ServiceState } from './scheduler/service-state' - -function ensureNeeds( - nodeOrServiceState: NodeState | ServiceState, - needs: WorkService[], - processManager: ProcessManager, - state: State, - environment: Environment, - currentState: SchedulerState -): boolean { - const endedNeeds = needs - .map((need) => currentState.service[need.id]) - .filter((service) => service.type === 'end' || service.type === 'canceled') - if (endedNeeds.length > 0) { - if (isServiceState(nodeOrServiceState)) { - state.patchService({ - type: 'error', - service: nodeOrServiceState.service, - stateKey: nodeOrServiceState.stateKey, - errorMessage: endedNeeds - .map((n) => `service 
${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) - .join(', '), - }) - } else { - state.patchNode( - { - type: 'error', - node: nodeOrServiceState.node, - stateKey: nodeOrServiceState.stateKey, - errorMessage: endedNeeds - .map((n) => `service ${n.service.name} ${n.type === 'end' ? 'has ended ' + n.reason : 'was canceled'}`) - .join(', '), - }, - nodeOrServiceState.stateKey - ) - } - return false - } - - const pendingNeeds = needs - .map((need) => currentState.service[need.id]) - .filter((service) => service.type === 'pending') - - if (pendingNeeds.length > 0) { - for (const pendingNeed of pendingNeeds) { - if (!ensureNeeds(pendingNeed, pendingNeed.service.needs, processManager, state, environment, currentState)) { - continue - } - - state.patchService({ - type: 'starting', - service: pendingNeed.service, - stateKey: null, - }) - - const serviceContainers = getServiceContainers(currentState, pendingNeed.service.needs) - - processManager.background( - { - type: 'service', - name: pendingNeed.service.name, - id: pendingNeed.service.id + '-cache', - }, - async (abort) => { - await startService(pendingNeed.service, state, serviceContainers, environment, abort.signal) - } - ) - } - return false - } - - const hasNotReadyNeeds = needs.some((need) => currentState.service[need.id].type !== 'running') - if (hasNotReadyNeeds) { - return false - } - - return true -} - -export function getServiceContainers( - currentState: SchedulerState, - needs: WorkService[] -): { [key: string]: ServiceDns } { - const serviceContainers: { [key: string]: ServiceDns } = {} - for (const need of needs) { - const serviceState = currentState.service[need.id] - if (serviceState.type === 'running') { - serviceContainers[need.id] = serviceState.dns - } - } - return serviceContainers -} - -export async function schedule( - processManager: ProcessManager, - state: State, - environment: Environment -): Promise { - if (state.current.watch) { - startWatchProcesses(state, 
processManager, environment) - } - - state.on((currentState) => { - for (const [, nodeState] of Object.entries(currentState.node)) { - if (nodeState.type === 'pending') { - const hasOpenDeps = nodeState.node.deps.some((dep) => currentState.node[dep.id].type !== 'completed') - if (hasOpenDeps) { - continue - } - - state.patchNode( - { - type: 'starting', - node: nodeState.node, - started: new Date(), - stateKey: null, - }, - nodeState.stateKey - ) - - processManager.background( - { - type: 'task', - name: nodeState.node.name, - id: nodeState.node.id + '-cache', - }, - async (abort) => { - await startNode(nodeState, state, environment, abort.signal) - } - ) - } else if (nodeState.type === 'ready') { - if (!ensureNeeds(nodeState, nodeState.node.needs, processManager, state, environment, currentState)) { - continue - } - - const serviceContainers = getServiceContainers(currentState, nodeState.node.needs) - - state.patchNode( - { - type: 'running', - node: nodeState.node, - stateKey: nodeState.stateKey, - started: nodeState.started, - }, - nodeState.stateKey - ) - const ctx = logContext('task', nodeState.node) - if (isContainerWorkNode(nodeState.node)) { - processManager.task( - ctx, - dockerNode(nodeState.node, nodeState.stateKey, serviceContainers, state, environment) - ) - } else { - processManager.task(ctx, localNode(nodeState.node, nodeState.stateKey, state, environment)) - } - } - } - - for (const [serviceId, serviceState] of Object.entries(currentState.service)) { - const hasOpenDeps = serviceState.service.deps.some((dep) => currentState.node[dep.id].type !== 'completed') - if (hasOpenDeps) { - continue - } - - if (serviceState.type === 'ready') { - const ctx = logContext('service', serviceState.service) - state.patchService({ - type: 'starting', - service: serviceState.service, - stateKey: serviceState.stateKey, - }) - - const serviceContainers = getServiceContainers(currentState, serviceState.service.needs) - processManager.background( - ctx, - 
isContainerWorkService(serviceState.service) - ? dockerService(serviceState.service, serviceState.stateKey, serviceContainers, state, environment) - : kubernetesService(serviceState.service, serviceState.stateKey, state, environment) - ) - } - - if (serviceState.type !== 'ready' && serviceState.type !== 'running') { - continue - } - - let hasNeed = false - for (const nodeState of iterateWorkNodes(currentState.node)) { - if (nodeState.type === 'running' || nodeState.type === 'starting' || nodeState.type === 'pending') { - if (nodeState.node.needs.some((n) => n.id === serviceId)) { - hasNeed = true - break - } - } - } - - if (!hasNeed) { - for (const serviceState of iterateWorkServices(currentState.service)) { - if ( - serviceState.type === 'ready' || - serviceState.type === 'running' || - serviceState.type === 'starting' || - serviceState.type === 'pending' - ) { - if (serviceState.service.needs.some((n) => n.id === serviceId)) { - hasNeed = true - break - } - } - } - } - - if (!hasNeed) { - const ctx = logContext('service', serviceState.service) - environment.status.service(serviceState.service).write('info', 'stop unused service') - processManager.abort(ctx) - } - } - }) - await processManager.onComplete() - - const success = !Object.values(state.current.node).some((n) => n.type !== 'completed') - return { - state: state.current, - success, - } -} diff --git a/src/executer/scheduler/scheduler-state.ts b/src/executer/scheduler/scheduler-state.ts index 671251de..8ffadeef 100644 --- a/src/executer/scheduler/scheduler-state.ts +++ b/src/executer/scheduler/scheduler-state.ts @@ -14,4 +14,5 @@ export interface SchedulerState { service: SchedulerServiceState cacheMethod: CacheMethod watch: boolean + daemon: boolean } diff --git a/src/executer/scheduler/service-state.ts b/src/executer/scheduler/service-state.ts index 3c02cbcd..095632b4 100644 --- a/src/executer/scheduler/service-state.ts +++ b/src/executer/scheduler/service-state.ts @@ -25,6 +25,7 @@ export interface 
ServiceRunningState { service: WorkService dns: ServiceDns stateKey: string + remote: { pid?: number; containerId: string } | null } export interface ServiceEndState { diff --git a/src/executer/unschedule-services.ts b/src/executer/unschedule-services.ts new file mode 100644 index 00000000..fe0da5b9 --- /dev/null +++ b/src/executer/unschedule-services.ts @@ -0,0 +1,58 @@ +import { SchedulerState } from './scheduler/scheduler-state' +import { State } from './state' +import { ProcessManager } from './process-manager' +import { Environment } from './environment' +import { hasServiceAnActiveNeed } from './has-service-an-active-need' +import { logContext } from '../planner/work-node-status' +import { removeContainer } from '../docker/remove-container' +import { isContainerWorkService } from '../planner/work-service' +import { getErrorMessage } from '../log' + +export function unscheduleServices( + currentState: SchedulerState, + state: State, + processManager: ProcessManager, + environment: Environment, + force: boolean +) { + for (const [serviceId, serviceState] of Object.entries(currentState.service)) { + if (serviceState.type !== 'ready' && serviceState.type !== 'running') { + continue + } + + const hasNeed = hasServiceAnActiveNeed(currentState, serviceId) + + if (!force && hasNeed && !environment.abortCtrl.signal.aborted) { + continue + } + + const ctx = logContext('service', serviceState.service) + environment.status.service(serviceState.service).write('info', 'stop unused service') + if (serviceState.type === 'ready' || !serviceState.remote) { + processManager.abort(ctx) + } else if (isContainerWorkService(serviceState.service) && serviceState.remote) { + removeContainer(environment.docker.getContainer(serviceState.remote.containerId)) + .then(() => { + state.patchService({ + service: serviceState.service, + type: 'end', + reason: 'terminated', + stateKey: serviceState.stateKey, + }) + }) + .catch((err) => { + state.patchService({ + service: serviceState.service, 
+ type: 'error', + errorMessage: getErrorMessage(err), + stateKey: serviceState.stateKey, + }) + }) + state.patchService({ + service: serviceState.service, + type: 'canceled', + stateKey: serviceState.stateKey, + }) + } + } +} diff --git a/src/executer/work-scope.ts b/src/executer/work-scope.ts index 402355cc..5e39b399 100644 --- a/src/executer/work-scope.ts +++ b/src/executer/work-scope.ts @@ -6,12 +6,15 @@ export interface WorkTaskScope { export interface WorkLabelScope { filterLabels: LabelValues excludeLabels: LabelValues + mode: WorkScopeMode } +export type WorkScopeMode = 'service' | 'all' + export type WorkScope = WorkTaskScope | WorkLabelScope -export function emptyWorkLabelScope(): WorkLabelScope { - return { excludeLabels: {}, filterLabels: {} } +export function emptyWorkLabelScope(mode: WorkScopeMode): WorkLabelScope { + return { excludeLabels: {}, filterLabels: {}, mode } } export const isContextTaskFilter = (target: WorkScope): target is WorkTaskScope => 'taskName' in target diff --git a/src/get-work-context.ts b/src/get-work-context.ts index eccc1d24..9d67e518 100644 --- a/src/get-work-context.ts +++ b/src/get-work-context.ts @@ -13,6 +13,7 @@ export function getWorkScope(buildFile: BuildFile, contextFilter: WorkScope, env const tree = isContextTaskFilter(contextFilter) ? 
planWorkTree(buildFile, { taskName: contextFilter.taskName }) : planWorkNodes(buildFile, contextFilter) + for (const node of iterateWorkNodes(tree.nodes)) { node.envs = replaceEnvVariables(node.envs, environment.processEnvs) if (!isContainerWorkNode(node)) { diff --git a/src/planner/utils/parse-work-node-needs.ts b/src/planner/utils/parse-work-node-needs.ts index 500d0e01..7c3dfe9c 100644 --- a/src/planner/utils/parse-work-node-needs.ts +++ b/src/planner/utils/parse-work-node-needs.ts @@ -9,6 +9,7 @@ import { getDefaultKubeConfig } from './get-default-kube-config' import { BuildFileNameSelector, findBuildService } from './find-build-value' import { ExecutionBuildService } from '../../parser/build-file-service' import { assignDependencies } from './assign-dependencies' +import { mapLabels } from './map-labels' export function getWorkService(context: WorkContext, selector: BuildFileNameSelector): WorkService { const service = findBuildService(context, selector) @@ -51,6 +52,7 @@ export function parseWorkNodeNeeds( ports: (service.ports || []).map((m) => templateValue(m, service.envs)).map((m) => parseWorkPort(m)), needs: [], deps: [], + labels: mapLabels(service.labels), } if (service.image) { return { diff --git a/src/planner/utils/plan-work-nodes.ts b/src/planner/utils/plan-work-nodes.ts index d05a16ff..8231d5df 100644 --- a/src/planner/utils/plan-work-nodes.ts +++ b/src/planner/utils/plan-work-nodes.ts @@ -11,18 +11,23 @@ import { WorkTree } from '../work-tree' import { createSubWorkContext, createWorkContext, WorkContext } from '../work-context' import { WorkLabelScope } from '../../executer/work-scope' import { matchesAllLabels, matchesAnyLabel } from '../../executer/label-values' +import { getWorkService } from './parse-work-node-needs' export function planWorkNodes(build: BuildFile, options: WorkLabelScope): WorkTree { const context = createWorkContext(build) - addWorkNodes(context, []) + if (options.mode === 'all') { + addWorkNodes(context, []) + } else { + 
addWorkServices(context, []) + } const nodesIdsToRemove: string[] = [] - for (const [nodeId, node] of Object.entries(context.workTree.nodes)) { + for (const node of iterateWorkNodes(context.workTree.nodes)) { if ( - !matchesAllLabels(options.filterLabels, node, context.workTree.nodes) || + !matchesAllLabels(options.filterLabels, node, context.workTree) || matchesAnyLabel(options.excludeLabels, node) ) { - nodesIdsToRemove.push(nodeId) + nodesIdsToRemove.push(node.id) } } @@ -30,7 +35,9 @@ export function planWorkNodes(build: BuildFile, options: WorkLabelScope): WorkTr removeNode(nodeId, context.workTree) } - removeUnusedServices(context.workTree) + if (options.mode === 'all') { + removeUnusedServices(context.workTree) + } return context.workTree } @@ -101,3 +108,22 @@ function addWorkNodes(context: WorkContext, files: string[]) { addWorkNodes(createSubWorkContext(context, { type: 'includes', name }), files) } } + +function addWorkServices(context: WorkContext, files: string[]) { + if (files.indexOf(context.build.fileName) !== -1) { + return + } + + files.push(context.build.fileName) + for (const serviceName of Object.keys(context.build.services)) { + getWorkService(context, { name: serviceName }) + } + + for (const name of Object.keys(context.build.references)) { + addWorkServices(createSubWorkContext(context, { type: 'references', name }), files) + } + + for (const name of Object.keys(context.build.includes)) { + addWorkServices(createSubWorkContext(context, { type: 'includes', name }), files) + } +} diff --git a/src/planner/work-service.ts b/src/planner/work-service.ts index bd2846df..c12d1c88 100644 --- a/src/planner/work-service.ts +++ b/src/planner/work-service.ts @@ -7,6 +7,7 @@ import { import { WorkMount } from './work-mount' import { WorkNode } from './work-node' import { WorkVolume } from './work-volume' +import { LabelValues } from '../executer/label-values' export interface BaseWorkService { id: string @@ -15,6 +16,7 @@ export interface BaseWorkService 
{ ports: WorkPort[] needs: WorkService[] deps: WorkNode[] + labels: LabelValues buildService: ExecutionBuildService } diff --git a/src/program.ts b/src/program.ts index 70155d01..4a83c3c9 100644 --- a/src/program.ts +++ b/src/program.ts @@ -5,7 +5,7 @@ import { isCI } from './utils/ci' import { parseLabelArguments } from './parser/parse-label-arguments' import { Cli, getCli, isCliService, isCliTask } from './cli' import { getBuildFile } from './parser/get-build-file' -import { emptyWorkLabelScope, WorkScope } from './executer/work-scope' +import { emptyWorkLabelScope, WorkLabelScope, WorkScope, WorkScopeMode } from './executer/work-scope' import { getWorkScope } from './get-work-context' import { printItem, printProperty, printTitle } from './log' import { hasLabels } from './executer/label-values' @@ -17,8 +17,8 @@ export async function createCli(fileName: string, environment: Environment, work return getCli(workTree, environment) } -function parseWorkScope(options: unknown): WorkScope { - const scope = emptyWorkLabelScope() +function parseWorkLabelScope(options: unknown, mode: WorkScopeMode): WorkLabelScope { + const scope = emptyWorkLabelScope(mode) if (typeof options !== 'object') { return scope } @@ -60,7 +60,7 @@ export async function getProgram( .addOption(new Option('-f, --filter ', 'filter task and services with labels')) .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) .action(async (options) => { - const cli = await createCli(fileName, environment, parseWorkScope(options)) + const cli = await createCli(fileName, environment, parseWorkLabelScope(options, 'all')) const items = cli.ls() const tasks = items.filter(isCliTask) @@ -139,7 +139,7 @@ export async function getProgram( .addOption(new Option('-f, --filter ', 'filter task and services with labels')) .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) .action(async (options) => { - const cli = await createCli(fileName, environment, 
parseWorkScope(options)) + const cli = await createCli(fileName, environment, parseWorkLabelScope(options, 'all')) await cli.clean({ service: options.service, }) @@ -151,7 +151,7 @@ export async function getProgram( .addOption(new Option('-f, --filter ', 'filter task and services with labels')) .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) .action(async (path, options) => { - const cli = await createCli(fileName, environment, parseWorkScope(options)) + const cli = await createCli(fileName, environment, parseWorkLabelScope(options, 'all')) await cli.store(resolve(path)) }) @@ -161,7 +161,7 @@ export async function getProgram( .addOption(new Option('-f, --filter ', 'filter task and services with labels')) .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) .action(async (path, options) => { - const cli = await createCli(fileName, environment, parseWorkScope(options)) + const cli = await createCli(fileName, environment, parseWorkLabelScope(options, 'all')) await cli.restore(resolve(path)) }) @@ -173,7 +173,7 @@ export async function getProgram( .action(async (options) => { let errors = 0 - const cli = await createCli(fileName, environment, parseWorkScope(options)) + const cli = await createCli(fileName, environment, parseWorkLabelScope(options, 'all')) for await (const validation of cli.validate()) { if (validation.type === 'error') { errors++ @@ -194,10 +194,65 @@ export async function getProgram( .addOption(new Option('-f, --filter ', 'filter task and services with labels')) .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) .action(async (task, options) => { - const cli = await createCli(fileName, environment, task ? { taskName: task } : parseWorkScope(options)) + const cli = await createCli( + fileName, + environment, + task ? 
{ taskName: task } : parseWorkLabelScope(options, 'all') + ) cli.shutdown() }) + program + .command('up') + .description('start services(s)') + .addOption(new Option('-f, --filter ', 'filter task and services with labels')) + .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) + .addOption(new Option('-c, --concurrency ', 'parallel worker count').argParser(parseInt).default(4)) + .addOption(new Option('-w, --watch', 'watch tasks').default(false)) + .addOption(new Option('-d, --daemon', 'run services in background').default(false)) + .addOption( + new Option('-l, --log ', 'log mode') + .default(isCI ? 'live' : 'interactive') + .choices(['interactive', 'live', 'grouped']) + ) + .addOption( + new Option('--cache ', 'caching method to compare') + .default(isCI ? 'checksum' : 'modify-date') + .choices(['checksum', 'modify-date', 'none']) + ) + .action(async (options) => { + const scope = parseWorkLabelScope(options, 'service') + const cli = await createCli(fileName, environment, scope) + const result = await cli.runUp({ + cacheDefault: options.cache, + watch: options.watch, + workers: options.concurrency, + logMode: options.log, + daemon: options.daemon, + }) + + if (!result.success) { + program.error('Execution was not successful', { exitCode: 1 }) + } else { + process.exit() + } + }) + + program + .command('down') + .description('stop services(s)') + .addOption(new Option('-f, --filter ', 'filter task and services with labels')) + .addOption(new Option('-e, --exclude ', 'exclude task and services with labels')) + .action(async (options) => { + const scope = parseWorkLabelScope(options, 'service') + const cli = await createCli(fileName, environment, scope) + const result = await cli.runDown() + + if (!result.success) { + program.error('Execution was not successful', { exitCode: 1 }) + } + }) + program .command('exec', { isDefault: true }) .description('execute task(s)') @@ -217,8 +272,12 @@ export async function getProgram( 
.choices(['checksum', 'modify-date', 'none']) ) .action(async (task, options) => { - const cli = await createCli(fileName, environment, task ? { taskName: task } : parseWorkScope(options)) - const result = await cli.exec({ + const cli = await createCli( + fileName, + environment, + task ? { taskName: task } : parseWorkLabelScope(options, 'all') + ) + const result = await cli.runExec({ cacheDefault: options.cache, watch: options.watch, workers: options.concurrency, diff --git a/src/service/update-service-status.ts b/src/service/update-service-status.ts new file mode 100644 index 00000000..95157ad3 --- /dev/null +++ b/src/service/update-service-status.ts @@ -0,0 +1,32 @@ +import { Environment } from '../executer/environment' +import { iterateWorkServices } from '../planner/utils/plan-work-nodes' +import { State } from '../executer/state' + +export async function updateServiceStatus(state: State, environment: Environment): Promise { + for (const service of iterateWorkServices(state.current.service)) { + const currentServices = await environment.docker.listContainers({ + all: true, + filters: { + label: [`hammerkit-id=${service.service.id}`], + }, + }) + const currentService = currentServices[0] + if (!currentService) { + continue + } + + const servicePid = + 'hammerkit-pid' in currentService.Labels ? parseInt(currentService.Labels['hammerkit-pid']) : undefined + const serviceState = 'hammerkit-state' in currentService.Labels ? 
currentService.Labels['hammerkit-state'] : '' + + state.patchService({ + service: service.service, + type: 'running', + remote: { containerId: currentService.Id, pid: servicePid }, + stateKey: serviceState, + dns: { + containerId: currentService.Id, + }, + }) + } +} diff --git a/src/testing/integration/cache.spec.ts b/src/testing/integration/cache.spec.ts index 9390960c..718d99b5 100644 --- a/src/testing/integration/cache.spec.ts +++ b/src/testing/integration/cache.spec.ts @@ -42,7 +42,7 @@ describe('cache', () => { it('should mount generations of dependant tasks', async () => { const { cli, environment } = await suite.setup({ taskName: 'dependant' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `dependant`, 'node_modules') }) diff --git a/src/testing/integration/cancellation.spec.ts b/src/testing/integration/cancellation.spec.ts index b845fb7f..5a749c36 100644 --- a/src/testing/integration/cancellation.spec.ts +++ b/src/testing/integration/cancellation.spec.ts @@ -8,7 +8,7 @@ describe('cancellation', () => { async function testAbort(taskName: string, expectedState: string) { const { cli, environment } = await suite.setup({ taskName }) - const exec = await cli.execWatch({ logMode: 'live' }) + const exec = await cli.exec({ logMode: 'live' }) const abortNode = Object.values(exec.state.current.node).find((n) => n.node.name.startsWith('long_')) exec.processManager.on((evt) => { if (evt.type === 'started' && evt.context.id === abortNode?.node.id) { diff --git a/src/testing/integration/clean.spec.ts b/src/testing/integration/clean.spec.ts index 6eab1145..ff33ffa7 100644 --- a/src/testing/integration/clean.spec.ts +++ b/src/testing/integration/clean.spec.ts @@ -12,7 +12,7 @@ describe('clean', () => { it('should clean generated outputs locally', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await 
cli.exec({ cacheDefault: 'none' }) + const result = await cli.runExec({ cacheDefault: 'none' }) await expectSuccessfulResult(result, environment) const outputPath = join(suite.path, 'node_modules') @@ -24,7 +24,7 @@ describe('clean', () => { it('should clean generated outputs in containers', async () => { const { cli, environment } = await suite.setup({ taskName: 'docker:example' }) - const result = await cli.exec({ cacheDefault: 'none' }) + const result = await cli.runExec({ cacheDefault: 'none' }) await expectSuccessfulResult(result, environment) const outputPath = join(suite.path, 'node_modules') @@ -38,11 +38,11 @@ describe('clean', () => { it('should clean and restore created data in volumes', async () => { const { cli, environment } = await suite.setup({ taskName: 'example:service' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await cli.clean({ service: true }) - const resultAfterClean = await cli.exec() + const resultAfterClean = await cli.runExec() await expectSuccessfulResult(resultAfterClean, environment) }) }) diff --git a/src/testing/integration/cmd.spec.ts b/src/testing/integration/cmd.spec.ts index 7c4ab086..f8a94092 100644 --- a/src/testing/integration/cmd.spec.ts +++ b/src/testing/integration/cmd.spec.ts @@ -8,7 +8,7 @@ describe('cmd', () => { it('should run with path arg', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectContainsLog(result, environment, `example`, 'README.md') }) diff --git a/src/testing/integration/concurrency.spec.ts b/src/testing/integration/concurrency.spec.ts index 5f6ff9ae..5db5dede 100644 --- a/src/testing/integration/concurrency.spec.ts +++ b/src/testing/integration/concurrency.spec.ts @@ -9,7 +9,7 @@ describe('concurrency', () => { it('should run with concurrency lower than total 
tasks', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const exec = cli.execWatch({ workers: 1 }) + const exec = await cli.exec({ workers: 1 }) exec.state.on((state) => { const runningNodes = Object.values(state.node).filter((n) => n.type === 'running') expect(runningNodes.length).toBeLessThanOrEqual(1) diff --git a/src/testing/integration/docker.spec.ts b/src/testing/integration/docker.spec.ts index 2af1b1ed..2264c24f 100644 --- a/src/testing/integration/docker.spec.ts +++ b/src/testing/integration/docker.spec.ts @@ -8,7 +8,7 @@ describe('docker', () => { it('should pull docker image', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example`, '7.19.1') await expectLog(result, environment, `example`, 'v16.6.0') diff --git a/src/testing/integration/env.spec.ts b/src/testing/integration/env.spec.ts index 74ea640a..468998f8 100644 --- a/src/testing/integration/env.spec.ts +++ b/src/testing/integration/env.spec.ts @@ -8,28 +8,28 @@ describe('env', () => { it('should use env from build file', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example`, '16.6.0') }) it('should pass env to docker', async () => { const { cli, environment } = await suite.setup({ taskName: 'example_docker' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example_docker`, '16.6.0') }) it('should use env from task', async () => { const { cli, environment } = await suite.setup({ taskName: 'example_override' }) - const result = await cli.exec() + const result = 
await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example_override`, '15.0.0') }) it('should use env from .env', async () => { const { cli, environment } = await suite.setup({ taskName: 'example_with_dotenv' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example_with_dotenv`, '123456') }) @@ -41,7 +41,7 @@ describe('env', () => { VERSION: '1.0.0', }, }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `example_with_shell_env`, '1.0.0') }) diff --git a/src/testing/integration/error.spec.ts b/src/testing/integration/error.spec.ts index f27e535e..fb235a5e 100644 --- a/src/testing/integration/error.spec.ts +++ b/src/testing/integration/error.spec.ts @@ -7,13 +7,13 @@ describe('error', () => { it('should return error when local task failed', async () => { const { cli } = await suite.setup({ taskName: 'local_error' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeFalsy() }) it('should return error when docker task failed', async () => { const { cli } = await suite.setup({ taskName: 'docker_error' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeFalsy() }) }) diff --git a/src/testing/integration/export.spec.ts b/src/testing/integration/export.spec.ts index 74b88ca6..4930203c 100644 --- a/src/testing/integration/export.spec.ts +++ b/src/testing/integration/export.spec.ts @@ -8,14 +8,14 @@ describe('export', () => { it('should export created file', async () => { const { cli, environment } = await suite.setup({ taskName: 'example_file' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeTruthy() expect(await 
environment.file.read(join(environment.cwd, 'test.txt'))).toEqual('hello\n') }) it('should export created directory', async () => { const { cli, environment } = await suite.setup({ taskName: 'example_dir' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeTruthy() expect(await environment.file.read(join(environment.cwd, 'dist/test.txt'))).toEqual('hello\n') }) diff --git a/src/testing/integration/glob.spec.ts b/src/testing/integration/glob.spec.ts index ad3b902b..ce01c1a9 100644 --- a/src/testing/integration/glob.spec.ts +++ b/src/testing/integration/glob.spec.ts @@ -10,7 +10,7 @@ describe('glob', () => { async function testCache(expectInvalidate: boolean, action?: (env: Environment) => Promise) { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result1 = await cli.exec() + const result1 = await cli.runExec() await expectSuccessfulResult(result1, environment) const node = cli.node('example') @@ -25,7 +25,7 @@ describe('glob', () => { await action(environment) } - const result2 = await cli.exec() + const result2 = await cli.runExec() await expectSuccessfulResult(result2, environment) const nodeState2 = result2.state.node[node.id] diff --git a/src/testing/integration/include.spec.ts b/src/testing/integration/include.spec.ts index d3ed7b5c..ad227431 100644 --- a/src/testing/integration/include.spec.ts +++ b/src/testing/integration/include.spec.ts @@ -8,14 +8,14 @@ describe('include', () => { it('should run included task', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `foo:bar`, 'foobar') }) it('should get name:example', async () => { const { cli, environment } = await suite.setup({ taskName: 'name:example' }) - const result = await cli.exec() + const result = await cli.runExec() await 
expectSuccessfulResult(result, environment) }) diff --git a/src/testing/integration/invalid.spec.ts b/src/testing/integration/invalid.spec.ts index 343ef1f3..38fe38e5 100644 --- a/src/testing/integration/invalid.spec.ts +++ b/src/testing/integration/invalid.spec.ts @@ -8,7 +8,7 @@ describe('invalid', () => { it('should throw on invalid yaml', async () => { try { - await suite.setup(emptyWorkLabelScope()) + await suite.setup(emptyWorkLabelScope('all')) expect.fail('should not be called') } catch (e: any) { expect(e.message).toStartWith('unable to parse') diff --git a/src/testing/integration/invalid_loop.spec.ts b/src/testing/integration/invalid_loop.spec.ts index 790ed065..0366ad89 100644 --- a/src/testing/integration/invalid_loop.spec.ts +++ b/src/testing/integration/invalid_loop.spec.ts @@ -7,13 +7,13 @@ describe('invalid', () => { it('should detect loop in execution', async () => { const { cli } = await suite.setup({ taskName: 'foo' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeFalsy() }) it('should detect loop in services', async () => { const { cli } = await suite.setup({ taskName: 'loopservice' }) - const result = await cli.exec() + const result = await cli.runExec() expect(result.success).toBeFalsy() }) }) diff --git a/src/testing/integration/kubernetes.spec.ts b/src/testing/integration/kubernetes.spec.ts index 179fdf00..4979ef90 100644 --- a/src/testing/integration/kubernetes.spec.ts +++ b/src/testing/integration/kubernetes.spec.ts @@ -13,7 +13,7 @@ describe('kubernetes', () => { // CI setup missing xit('should forward deployment', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) - const result = await cli.exec() + const result = await cli.runExec() expectSuccessfulResult(result, environment) }) }) diff --git a/src/testing/integration/labels.spec.ts b/src/testing/integration/labels.spec.ts index af39e23c..0af57b18 100644 --- a/src/testing/integration/labels.spec.ts +++ 
b/src/testing/integration/labels.spec.ts @@ -10,6 +10,7 @@ describe('local', () => { const { cli } = await suite.setup({ filterLabels: { app: ['bar'] }, excludeLabels: {}, + mode: 'all', }) expectNodes(cli.ls(), ['bar', 'base']) }) @@ -18,6 +19,7 @@ describe('local', () => { const { cli } = await suite.setup({ filterLabels: { app: ['foo'] }, excludeLabels: {}, + mode: 'all', }) expectNodes(cli.ls(), ['foo', 'base']) }) @@ -26,6 +28,7 @@ describe('local', () => { const { cli } = await suite.setup({ filterLabels: {}, excludeLabels: { app: ['foo'] }, + mode: 'all', }) expectNodes(cli.ls(), ['bar', 'base']) }) @@ -34,6 +37,7 @@ describe('local', () => { const { cli } = await suite.setup({ filterLabels: {}, excludeLabels: { app: ['base'] }, + mode: 'all', }) expectNodes(cli.ls(), []) }) @@ -42,6 +46,7 @@ describe('local', () => { const { cli } = await suite.setup({ filterLabels: { app: ['foo', 'bar'] }, excludeLabels: {}, + mode: 'all', }) expectNodes(cli.ls(), ['foo', 'bar', 'base']) }) diff --git a/src/testing/integration/local.spec.ts b/src/testing/integration/local.spec.ts index 6bc4605c..62ec3188 100644 --- a/src/testing/integration/local.spec.ts +++ b/src/testing/integration/local.spec.ts @@ -8,7 +8,7 @@ describe('local', () => { it('should run local task', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) }) }) diff --git a/src/testing/integration/monorepo.spec.ts b/src/testing/integration/monorepo.spec.ts index 58ec0573..eae60bcb 100644 --- a/src/testing/integration/monorepo.spec.ts +++ b/src/testing/integration/monorepo.spec.ts @@ -8,7 +8,7 @@ describe('monorepo', () => { it('should build and clean monorepo', async () => { const { cli, environment } = await suite.setup({ taskName: 'build' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, 
environment) await cli.clean() }, 120000) diff --git a/src/testing/integration/reference.spec.ts b/src/testing/integration/reference.spec.ts index 7b1a1a34..9c792e41 100644 --- a/src/testing/integration/reference.spec.ts +++ b/src/testing/integration/reference.spec.ts @@ -9,14 +9,14 @@ describe('reference', () => { it('should run included task', async () => { const { cli, environment } = await suite.setup({ taskName: 'example' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) await expectLog(result, environment, `foo:bar`, 'foobar') await expectLog(result, environment, `example`, 'hammertime') }) it('should list task with references tasks nested', async () => { - const { cli } = await suite.setup(emptyWorkLabelScope()) + const { cli } = await suite.setup(emptyWorkLabelScope('all')) const workNodes = cli.ls() expect(workNodes.map((t) => t.item.name)).toEqual(['foo:bardb', 'example', 'foo:bar', 'foo:sub:sub']) }) diff --git a/src/testing/integration/services.spec.ts b/src/testing/integration/services.spec.ts index a1468984..d82ef23c 100644 --- a/src/testing/integration/services.spec.ts +++ b/src/testing/integration/services.spec.ts @@ -15,13 +15,25 @@ describe('services', () => { it('should run with needed service', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) }, 120000) it('should start needs of service', async () => { const { cli, environment } = await suite.setup({ taskName: 'test' }) - const result = await cli.exec() + const result = await cli.runExec() await expectSuccessfulResult(result, environment) }, 120000) + + it('should start services up', async () => { + const { cli, environment } = await suite.setup({ + mode: 'service', + filterLabels: { task: ['dev'] }, + excludeLabels: {}, + }) + const upResult = await cli.runUp({ daemon: 
true }) + expect(upResult.success).toBeTrue() + const downResult = await cli.runDown() + expect(downResult.success).toBeTrue() + }, 120000) }) diff --git a/src/testing/integration/store.spec.ts b/src/testing/integration/store.spec.ts index 87d81a01..fc398f4e 100644 --- a/src/testing/integration/store.spec.ts +++ b/src/testing/integration/store.spec.ts @@ -17,7 +17,7 @@ describe('store/restore', () => { expect(existsSync(generatedPath)).toBeFalsy() expect(existsSync(cacheStoragePath)).toBeFalsy() - const result = await cli.exec({}) + const result = await cli.runExec({}) await expectSuccessfulResult(result, environment) expect(existsSync(generatedPath)).toBeTruthy() @@ -32,7 +32,7 @@ describe('store/restore', () => { await cli.restore(cacheStoragePath) expect(existsSync(generatedPath)).toBeTruthy() - const execAfterRestore = await cli.exec() + const execAfterRestore = await cli.runExec() await expectSuccessfulResult(execAfterRestore, environment) const node = cli.node('example') @@ -48,14 +48,14 @@ describe('store/restore', () => { const cacheStoragePath = join(environment.cwd, 'storage') - const firstExecResult = await cli.exec() + const firstExecResult = await cli.runExec() await expectSuccessfulResult(firstExecResult, environment) await cli.store(cacheStoragePath) await cli.clean() await cli.restore(cacheStoragePath) - const execAfterRestore = await cli.exec() + const execAfterRestore = await cli.runExec() await expectSuccessfulResult(execAfterRestore, environment) const node = cli.node('example:docker') diff --git a/src/testing/integration/watch-dependency.spec.ts b/src/testing/integration/watch-dependency.spec.ts index 636d4b04..29b0f0be 100644 --- a/src/testing/integration/watch-dependency.spec.ts +++ b/src/testing/integration/watch-dependency.spec.ts @@ -8,7 +8,7 @@ describe('watch-dependency', () => { it('should run watch task', async () => { const { cli, environment } = await suite.setup({ taskName: 'third' }) const thirdNode = cli.node('third') - const exec 
= cli.execWatch({ watch: true }) + const exec = await cli.exec({ watch: true }) let content = '' let changed = false diff --git a/src/testing/integration/watch.spec.ts b/src/testing/integration/watch.spec.ts index 148921a2..87fae489 100644 --- a/src/testing/integration/watch.spec.ts +++ b/src/testing/integration/watch.spec.ts @@ -9,7 +9,7 @@ describe('watch', () => { it('should run watch task and cancel', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) const apiNode = cli.node('api') - const exec = cli.execWatch({ watch: true }) + const exec = await cli.exec({ watch: true }) exec.processManager.on((evt) => { if (evt.context.id === apiNode.id && evt.type === 'started') { environment.abortCtrl.abort() @@ -23,7 +23,7 @@ describe('watch', () => { it('should restart task if dependency updates', async () => { const { cli, environment } = await suite.setup({ taskName: 'api' }) const apiNode = cli.node('api') - const exec = cli.execWatch({ watch: true }) + const exec = await cli.exec({ watch: true }) let appendedFile = false let restarted = false