UNPKG

@arc-fusion/cli

Version:

CLI for running Arc Fusion on your local machine

137 lines (118 loc) 3.55 kB
'use strict'

const path = require('path')
const yamljs = require('yamljs')
const spawn = require('./spawn')
const { exec, mkdirp, touch, writeFile, gentleCopy } = require('./promises')
const { getEnvVariables, createDockerVolume, deleteDockerVolume } = require('./local')
const {
  ADMIN_RELEASE,
  FUSION_RELEASE,
  FUSION_ROOT,
  PROJECT_ROOT,
  REPO_NAME,
  BOOTSTRAP_ROOT,
  CACHE_VOLUME_NAME
} = require('../environment')

/**
 * Run a `docker` sub-command and return its stdout lines, dropping the first
 * line (the column header that listing commands like `ps`/`volume ls` print).
 *
 * @param {string} op - docker sub-command and flags, e.g. `'ps'`
 * @returns {Promise<string[]>} remaining stdout lines; NOTE(review): may end
 *   with an empty string from the trailing newline — callers should filter
 */
async function dockerList (op) {
  const { stdout: processes } = await exec(`docker ${op}`)
  return processes
    .split('\n')
    .slice(1)
}

/**
 * Normalize an optional compose-file name into a filename suffix:
 * falsy -> '', otherwise a single leading dot and no trailing dots
 * (e.g. 'test' -> '.test', '..test..' -> '.test').
 *
 * @param {string} [name]
 * @returns {string}
 */
function generateFilename (name) {
  if (!name) return ''
  return name.replace(/^\.*/, '.').replace(/\.+$/, '')
}

/**
 * Build an async runner for a docker-compose operation against the generated
 * `.fusion/docker-compose[.<name>].yml` file in the project root.
 *
 * @param {string|string[]} op - docker-compose arguments, e.g. ['up', '-d']
 * @returns {(name?: string) => Promise<*>} runner; `name` selects the compose
 *   file suffix and namespaces COMPOSE_PROJECT_NAME as `fusion_<name>`
 */
function dockerComposeOp (op) {
  const cwd = PROJECT_ROOT
  return async function (name) {
    return spawn(
      'docker-compose',
      ['-f', path.join(PROJECT_ROOT, '.fusion', `docker-compose${generateFilename(name)}.yml`)].concat(op || []),
      {
        cwd,
        env: {
          ...process.env,
          COMPOSE_PROJECT_NAME: `fusion${name ? `_${name}` : ''}`
        },
        stdio: 'inherit'
      }
    )
  }
}

// Start the compose stack detached (docker-compose up -d).
const daemon = dockerComposeOp(['up', '-d'])

/**
 * Tear down the compose stack and remove its volumes.
 * @param {string} [name] - compose-file suffix / project namespace
 */
async function down (name) {
  return dockerComposeOp(['down', '-v'])(name)
}

/**
 * Build (or pull) the images for the compose stack.
 * @param {string} [name] - compose-file suffix / project namespace
 */
async function build (name) {
  // don't pull images if we are linking the fusion repo locally
  // (it'll use the cached one if you already pulled it before)
  const { FUSION_REPO } = await getEnvVariables(PROJECT_ROOT)
  const useImages = name || !FUSION_REPO
  // `docker-compose pull` updates all services that are image-based
  if (useImages) await dockerComposeOp('pull')(name)
  // need to build engine first if we are not using an image and it is the default start
  if (!useImages && !name) await dockerComposeOp(['build', '--progress=plain', 'engine'])(name)
  // `docker-compose build --pull` updates the base FROM image for all services that are built
  return dockerComposeOp(useImages ? ['build', '--pull'] : ['build', '--progress=plain'])(name)
}

/**
 * Generate `.fusion/docker-compose[.<name>].yml` from the matching template,
 * creating `.env`, the `.fusion` directory, and `.dockerignore` as needed.
 *
 * @param {string} [name] - compose-file suffix
 * @param {Object} [options] - extra props merged over the environment props
 * @returns {Promise<*>} result of writing the generated YAML file
 */
async function generate (name, options = {}) {
  await touch(path.join(PROJECT_ROOT, '.env'))
  await mkdirp(path.join(PROJECT_ROOT, '.fusion'))
  await gentleCopy(path.join(BOOTSTRAP_ROOT, '.dockerignore'), path.join(PROJECT_ROOT, '.dockerignore'))
  const props = { ADMIN_RELEASE, FUSION_ROOT, PROJECT_ROOT, REPO_NAME, CACHE_VOLUME_NAME, ...options }
  // 2.0.x releases use the templates under `templates/2.0/`
  const is20 = /^(2\.0\.|2\.0$)/i.test(FUSION_RELEASE)
  // each template module exports an async factory taking the props object
  const dockerCompose = await require(`../../templates/${is20 ? '2.0/' : ''}docker-compose${generateFilename(name)}.yml`)(props)
  return writeFile(
    path.join(PROJECT_ROOT, '.fusion', `docker-compose${generateFilename(name)}.yml`),
    yamljs.stringify(dockerCompose, 10, 2)
  )
}

/**
 * Full lifecycle: generate the compose file, reset the stack, build, start.
 * The stack is always torn down afterwards, even if a step throws.
 *
 * @param {string} [name] - compose-file suffix / project namespace
 * @param {Object} [options] - forwarded to generate()
 */
async function run (name, options) {
  try {
    await generate(name, options)
    await down(name)
    await build(name)
    return await start(name)
  } finally {
    // always clean up the stack, on success or failure
    await down(name)
  }
}

/**
 * Start the compose stack in the foreground.
 *
 * Named runs exit when the named container exits and propagate its exit code;
 * the default (unnamed) run sets up an external cache volume first so webpack
 * caching persists across runs.
 *
 * @param {string} [name] - compose-file suffix / container to track
 * @param {boolean} [rebuild] - when unnamed, recreate the cache volume
 */
async function start (name, rebuild) {
  const operation = ['up']
  if (name) {
    operation.push('--abort-on-container-exit')
    operation.push(`--exit-code-from=${name}`)
    operation.push('--remove-orphans')
  } else {
    // create an external cache volume so we can persist webpack caching across runs
    if (rebuild) await deleteDockerVolume(CACHE_VOLUME_NAME)
    await createDockerVolume(CACHE_VOLUME_NAME)
  }
  return dockerComposeOp(operation)(name)
}

// Stop the compose stack without removing containers or volumes.
const stop = dockerComposeOp('stop')

module.exports = { build, daemon, dockerList, down, generate, run, start, stop }