Object.defineProperty(exports, "__esModule", { value: true });
exports.composeCliFlags = void 0;
exports.parseReleaseTagKeysAndValues = parseReleaseTagKeysAndValues;
exports.applyReleaseTagKeysAndValues = applyReleaseTagKeysAndValues;
exports.loadProject = loadProject;
exports.buildProject = buildProject;
exports.makeImageName = makeImageName;
exports.getServiceDirsFromComposition = getServiceDirsFromComposition;
exports.isBuildConfig = isBuildConfig;
exports.tarDirectory = tarDirectory;
exports.checkBuildSecretsRequirements = checkBuildSecretsRequirements;
exports.getRegistrySecrets = getRegistrySecrets;
exports.makeBuildTasks = makeBuildTasks;
exports.validateProjectDirectory = validateProjectDirectory;
exports.deployProject = deployProject;
exports.createSpinner = createSpinner;
exports.createRunLoop = createRunLoop;
const core_1 = require("@oclif/core");
const fs_1 = require("fs");
const yaml = require("js-yaml");
const _ = require("lodash");
const path = require("path");
const semver = require("semver");
const errors_1 = require("../errors");
const lazy_1 = require("./lazy");
const Logger = require("./logger");
const which_1 = require("./which");
const allowedContractTypes = ['sw.application', 'sw.block'];
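/**
 * Split a flat `--release-tag` argument list into alternating keys and
 * values. Keys must be non-empty and contain no whitespace; a trailing
 * key without a value is paired with an empty string.
 */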
function parseReleaseTagKeysAndValues(releaseTags) {
if (releaseTags.length === 0) {
return { releaseTagKeys: [], releaseTagValues: [] };
}
const releaseTagKeys = releaseTags.filter((_v, i) => i % 2 === 0);
const releaseTagValues = releaseTags.filter((_v, i) => i % 2 === 1);
releaseTagKeys.forEach((key) => {
if (key === '') {
throw new errors_1.ExpectedError(`Error: --release-tag keys cannot be empty`);
}
if (/\s/.test(key)) {
throw new errors_1.ExpectedError(`Error: --release-tag keys cannot contain whitespace`);
}
});
if (releaseTagKeys.length !== releaseTagValues.length) {
releaseTagValues.push('');
}
return { releaseTagKeys, releaseTagValues };
}
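/**
 * Apply the given release tag key/value pairs to a release through the
 * SDK, setting all tags in parallel. No-op when no keys are provided.
 */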
async function applyReleaseTagKeysAndValues(sdk, releaseId, releaseTagKeys, releaseTagValues) {
if (releaseTagKeys.length === 0) {
return;
}
await Promise.all(_.zip(releaseTagKeys, releaseTagValues).map(async ([key, value]) => {
await sdk.models.release.tags.set(releaseId, key, value);
}));
}
const LOG_LENGTH_MAX = 512 * 1024;
const compositionFileNames = ['docker-compose.yml', 'docker-compose.yaml'];
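/**
 * Load the docker-compose project at opts.projectPath. If an explicit
 * image is given (or no composition file is found), a default
 * single-service composition is generated instead. In local (device)
 * mode, a 'docker-compose.dev.yml' overlay is merged in when present.
 */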
async function loadProject(logger, opts, image, imageTag) {
const compose = await Promise.resolve().then(() => require('@balena/compose/dist/parse'));
const { createProject } = await Promise.resolve().then(() => require('./compose'));
let composeName;
let composeStr;
logger.logDebug('Loading project...');
if (image) {
logger.logInfo(`Creating default composition with image: "${image}"`);
composeStr = compose.defaultComposition(image);
}
else {
logger.logDebug('Resolving project...');
[composeName, composeStr] = await resolveProject(logger, opts.projectPath);
if (composeName) {
if (opts.dockerfilePath) {
logger.logWarn(`Ignoring alternative dockerfile "${opts.dockerfilePath}" because composition file "${composeName}" exists`);
}
}
else {
logger.logInfo(`Creating default composition with source: "${opts.projectPath}"`);
composeStr = compose.defaultComposition(undefined, opts.dockerfilePath);
}
if (opts.isLocal) {
composeStr = await mergeDevComposeOverlay(logger, composeStr, opts.projectPath);
}
}
logger.logDebug('Creating project...');
return createProject(opts.projectPath, composeStr, opts.projectName, imageTag);
}
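/**
 * If a 'docker-compose.dev.yml' file exists at the project root, merge
 * its services over those of the main composition. The merge is shallow:
 * each overlay service replaces the base service definition wholesale.
 */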
async function mergeDevComposeOverlay(logger, composeStr, projectRoot) {
const devOverlayFilename = 'docker-compose.dev.yml';
const devOverlayPath = path.join(projectRoot, devOverlayFilename);
if (await (0, which_1.exists)(devOverlayPath)) {
logger.logInfo(`Docker compose dev overlay detected (${devOverlayFilename}) - merging.`);
const loadObj = (inputStr) => (yaml.load(inputStr) || {});
try {
const compose = loadObj(composeStr);
const devOverlay = loadObj(await fs_1.promises.readFile(devOverlayPath, 'utf8'));
compose.services = { ...compose.services, ...devOverlay.services };
composeStr = yaml.dump(compose, { styles: { '!!null': 'empty' } });
}
catch (err) {
err.message = `Error merging docker compose dev overlay file "${devOverlayPath}":\n${err.message}`;
throw err;
}
}
return composeStr;
}
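/**
 * Look for a 'docker-compose.yml' or 'docker-compose.yaml' file at the
 * project root and return a [filename, contents] tuple; both are empty
 * strings when no composition file is found.
 */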
async function resolveProject(logger, projectRoot, quiet = false) {
let composeFileName = '';
let composeFileContents = '';
for (const fname of compositionFileNames) {
const fpath = path.join(projectRoot, fname);
if (await (0, which_1.exists)(fpath)) {
logger.logDebug(`${fname} file found at "${projectRoot}"`);
composeFileName = fname;
try {
composeFileContents = await fs_1.promises.readFile(fpath, 'utf8');
}
catch (err) {
logger.logError(`Error reading composition file "${fpath}":\n${err}`);
throw err;
}
break;
}
}
if (!quiet && !composeFileName) {
logger.logInfo(`No "docker-compose.yml" file found at "${projectRoot}"`);
}
return [composeFileName, composeFileContents];
}
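/**
 * Build all services in the composition and return the built images,
 * rendering build progress along the way and printing a per-service
 * image size summary when the renderer ends.
 */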
async function buildProject(opts) {
await checkBuildSecretsRequirements(opts.docker, opts.projectPath);
const compose = await Promise.resolve().then(() => require('@balena/compose/dist/parse'));
const imageDescriptors = compose.parse(opts.composition);
const renderer = await startRenderer({ imageDescriptors, ...opts });
let buildSummaryByService;
try {
const { awaitInterruptibleTask } = await Promise.resolve().then(() => require('./helpers'));
const [images, summaryMsgByService] = await awaitInterruptibleTask($buildProject, imageDescriptors, renderer, opts);
buildSummaryByService = summaryMsgByService;
return images;
}
finally {
renderer.end(buildSummaryByService);
}
}
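/**
 * The interruptible core of buildProject: install QEMU if emulation is
 * required, tar the project directory, prepare the per-service build
 * tasks, run the builds and inspect the resulting images.
 */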
async function $buildProject(imageDescriptors, renderer, opts) {
const { logger, projectName } = opts;
logger.logInfo(`Building for ${opts.arch}/${opts.deviceType}`);
const needsQemu = await installQemuIfNeeded({ ...opts, imageDescriptors });
const tarStream = await tarDirectory(opts.projectPath, opts);
const tasks = await makeBuildTasks(opts.composition, tarStream, opts, logger, projectName);
const imageDescriptorsByServiceName = _.keyBy(imageDescriptors, 'serviceName');
setTaskAttributes({ tasks, imageDescriptorsByServiceName, ...opts });
const transposeOptArray = await Promise.all(tasks.map((task) => {
if (needsQemu && !task.external) {
return qemuTransposeBuildStream({ task, ...opts });
}
}));
await Promise.all(transposeOptArray.map((transposeOptions, index) => setTaskProgressHooks({
task: tasks[index],
renderer,
transposeOptions,
...opts,
})));
logger.logDebug('Prepared tasks; building...');
const { BALENA_ENGINE_TMP_PATH } = await Promise.resolve().then(() => require('../config'));
const builder = await Promise.resolve().then(() => require('@balena/compose/dist/multibuild'));
const builtImages = await builder.performBuilds(tasks, opts.docker, BALENA_ENGINE_TMP_PATH);
return await inspectBuiltImages({
builtImages,
imageDescriptorsByServiceName,
tasks,
...opts,
});
}
async function startRenderer({ imageDescriptors, inlineLogs, logger, }) {
let renderer;
if (inlineLogs) {
renderer = new (await Promise.resolve().then(() => require('./compose'))).BuildProgressInline(logger.streams['build'], imageDescriptors);
}
else {
const tty = (await Promise.resolve().then(() => require('./tty')))(process.stdout);
renderer = new (await Promise.resolve().then(() => require('./compose'))).BuildProgressUI(tty, imageDescriptors);
}
renderer.start();
return renderer;
}
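/**
 * Install the QEMU emulator if the build requires emulation, copying the
 * emulator binary into each local service's build context. Returns
 * whether emulation is in effect.
 */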
async function installQemuIfNeeded({ arch, docker, emulated, imageDescriptors, logger, projectPath, }) {
const qemu = await Promise.resolve().then(() => require('./qemu'));
const needsQemu = await qemu.installQemuIfNeeded(emulated, logger, arch, docker);
if (needsQemu) {
logger.logInfo('Emulation is enabled');
await Promise.all(imageDescriptors.map(function (d) {
if (isBuildConfig(d.image)) {
return qemu.copyQemu(path.join(projectPath, d.image.context || '.'), arch);
}
}));
}
return needsQemu;
}
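/**
 * Compose a local image name of the form 'projectName_serviceName[:tag]',
 * lowercased, with any ':' characters in the parts replaced by '_'.
 */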
function makeImageName(projectName, serviceName, tag) {
let name = `${projectName}_${serviceName}`;
if (tag) {
name = [name, tag].map((s) => s.replace(/:/g, '_')).join(':');
}
return name.toLowerCase();
}
function setTaskAttributes({ tasks, buildOpts, imageDescriptorsByServiceName, projectName, }) {
var _a, _b;
for (const task of tasks) {
const d = imageDescriptorsByServiceName[task.serviceName];
(_a = task.tag) !== null && _a !== void 0 ? _a : (task.tag = makeImageName(projectName, task.serviceName, buildOpts.t));
if (isBuildConfig(d.image)) {
d.image.tag = task.tag;
}
task.args = {
...task.args,
...buildOpts.buildargs,
};
(_b = task.dockerOpts) !== null && _b !== void 0 ? _b : (task.dockerOpts = {});
if (task.args && Object.keys(task.args).length) {
task.dockerOpts.buildargs = {
...task.dockerOpts.buildargs,
...task.args,
};
}
_.merge(task.dockerOpts, buildOpts, { t: task.tag });
}
}
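/**
 * Rewrite a task's tar build stream so that the build copies the QEMU
 * emulator binary into the image, and return the transpose options used
 * (host/container emulator paths and file mode).
 */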
async function qemuTransposeBuildStream({ task, dockerfilePath, projectPath, }) {
var _a;
const qemu = await Promise.resolve().then(() => require('./qemu'));
const binPath = qemu.qemuPathInContext(path.join(projectPath, (_a = task.context) !== null && _a !== void 0 ? _a : ''));
if (task.buildStream == null) {
throw new Error(`No buildStream for task '${task.tag}'`);
}
const transpose = await Promise.resolve().then(() => require('@balena/compose/dist/emulate'));
const { toPosixPath } = (await Promise.resolve().then(() => require('@balena/compose/dist/multibuild')))
.PathUtils;
const transposeOptions = {
hostQemuPath: toPosixPath(binPath),
containerQemuPath: `/tmp/${qemu.QEMU_BIN_NAME}`,
qemuFileMode: 0o555,
};
task.buildStream = (await transpose.transposeTarStream(task.buildStream, transposeOptions, dockerfilePath || undefined));
return transposeOptions;
}
async function setTaskProgressHooks({ inlineLogs, renderer, task, transposeOptions, }) {
const transpose = await Promise.resolve().then(() => require('@balena/compose/dist/emulate'));
const logStream = renderer.streams[task.serviceName];
task.logBuffer = [];
const captureStream = buildLogCapture(task.external, task.logBuffer);
if (task.external) {
captureStream.pipe(logStream);
task.progressHook = pullProgressAdapter(captureStream);
}
else {
task.streamHook = function (stream) {
let rawStream;
stream = createLogStream(stream);
if (transposeOptions) {
const buildThroughStream = transpose.getBuildThroughStream(transposeOptions);
rawStream = stream.pipe(buildThroughStream);
}
else {
rawStream = stream;
}
return rawStream
.pipe(dropEmptyLinesStream())
.pipe(captureStream)
.pipe(buildProgressAdapter(!!inlineLogs))
.pipe(logStream);
};
}
}
async function inspectBuiltImages({ builtImages, docker, imageDescriptorsByServiceName, tasks, }) {
const images = await Promise.all(builtImages.map((builtImage) => inspectBuiltImage({
builtImage,
docker,
imageDescriptorsByServiceName,
tasks,
})));
const humanize = require('humanize');
const summaryMsgByService = {};
for (const image of images) {
summaryMsgByService[image.serviceName] = `Image size: ${humanize.filesize(image.props.size)}`;
}
return [images, summaryMsgByService];
}
async function inspectBuiltImage({ builtImage, docker, imageDescriptorsByServiceName, tasks, }) {
var _a, _b;
if (!builtImage.successful) {
const error = (_a = builtImage.error) !== null && _a !== void 0 ? _a : new Error();
error.serviceName = builtImage.serviceName;
throw error;
}
const d = imageDescriptorsByServiceName[builtImage.serviceName];
const task = _.find(tasks, {
serviceName: builtImage.serviceName,
});
const image = {
serviceName: d.serviceName,
name: (isBuildConfig(d.image) ? d.image.tag : d.image) || '',
logs: truncateString(((_b = task === null || task === void 0 ? void 0 : task.logBuffer) === null || _b === void 0 ? void 0 : _b.join('\n')) || '', LOG_LENGTH_MAX),
props: {
dockerfile: builtImage.dockerfile,
projectType: builtImage.projectType,
},
};
if (builtImage.startTime) {
image.props.startTime = new Date(builtImage.startTime);
}
if (builtImage.endTime) {
image.props.endTime = new Date(builtImage.endTime);
}
image.props.size = (await docker.getImage(image.name).inspect()).Size;
return image;
}
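/**
 * Read the project's build metadata from '.balena/balena.{yml,yaml,json}'
 * (or the legacy '.resin/resin.*' equivalents), returning the parsed
 * object and the path of the file found, or [{}, ''] when none exists.
 */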
async function loadBuildMetadata(sourceDir) {
let metadataPath = '';
let rawString = '';
outer: for (const fName of ['balena', 'resin']) {
for (const fExt of ['yml', 'yaml', 'json']) {
metadataPath = path.join(sourceDir, `.${fName}`, `${fName}.${fExt}`);
try {
rawString = await fs_1.promises.readFile(metadataPath, 'utf8');
break outer;
}
catch (err) {
if (err.code === 'ENOENT') {
continue;
}
else {
throw err;
}
}
}
}
if (!rawString) {
return [{}, ''];
}
let buildMetadata;
try {
if (metadataPath.endsWith('json')) {
buildMetadata = JSON.parse(rawString);
}
else {
buildMetadata = yaml.load(rawString);
}
}
catch (err) {
throw new errors_1.ExpectedError(`Error parsing file "${metadataPath}":\n ${err.message}`);
}
return [buildMetadata, metadataPath];
}
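/**
 * Map each service in the composition to its build context directory,
 * normalized and made relative to the project source folder.
 */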
async function getServiceDirsFromComposition(sourceDir, composition) {
var _a;
const { createProject } = await Promise.resolve().then(() => require('./compose'));
const serviceDirs = {};
if (!composition) {
const [, composeStr] = await resolveProject(Logger.getLogger(), sourceDir, true);
if (composeStr) {
composition = createProject(sourceDir, composeStr).composition;
}
}
if (composition === null || composition === void 0 ? void 0 : composition.services) {
const relPrefix = '.' + path.sep;
for (const [serviceName, service] of Object.entries(composition.services)) {
let dir = (typeof service.build === 'string'
? service.build
: (_a = service.build) === null || _a === void 0 ? void 0 : _a.context) || '.';
dir = path.normalize(dir);
if (path.isAbsolute(dir)) {
dir = path.relative(sourceDir, dir);
}
dir = dir.endsWith(path.sep) ? dir.slice(0, -1) : dir;
dir = dir.startsWith(relPrefix) ? dir.slice(2) : dir;
serviceDirs[serviceName] = dir || '.';
}
}
return serviceDirs;
}
function isBuildConfig(image) {
return image != null && typeof image !== 'string';
}
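/**
 * Create a tar stream of the project directory, filtering files through
 * .dockerignore rules (one file per service with --multi-dockerignore)
 * and, on Windows, optionally converting CRLF line endings to LF.
 */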
async function tarDirectory(dir, { composition, convertEol = false, multiDockerignore = false, preFinalizeCallback, }) {
const { filterFilesWithDockerignore } = await Promise.resolve().then(() => require('./ignore'));
const { toPosixPath } = (await Promise.resolve().then(() => require('@balena/compose/dist/multibuild')))
.PathUtils;
let readFile;
if (process.platform === 'win32') {
const { readFileWithEolConversion } = require('./eol-conversion');
readFile = (file) => readFileWithEolConversion(file, convertEol);
}
else {
readFile = fs_1.promises.readFile;
}
const tar = await Promise.resolve().then(() => require('tar-stream'));
const pack = tar.pack();
const serviceDirs = await getServiceDirsFromComposition(dir, composition);
const { filteredFileList, dockerignoreFiles } = await filterFilesWithDockerignore(dir, multiDockerignore, serviceDirs);
printDockerignoreWarn(dockerignoreFiles, serviceDirs, multiDockerignore);
for (const fileStats of filteredFileList) {
pack.entry({
name: toPosixPath(fileStats.relPath),
mtime: fileStats.stats.mtime,
mode: fileStats.stats.mode,
size: fileStats.stats.size,
}, await readFile(fileStats.filePath));
}
if (preFinalizeCallback) {
await preFinalizeCallback(pack);
}
pack.finalize();
return pack;
}
function printDockerignoreWarn(dockerignoreFiles, serviceDirsByService, multiDockerignore) {
let rootDockerignore;
const logger = Logger.getLogger();
const relPrefix = '.' + path.sep;
const serviceDirs = Object.values(serviceDirsByService || {});
const unusedFiles = dockerignoreFiles.filter((dockerignoreStats) => {
let dirname = path.dirname(dockerignoreStats.relPath);
dirname = dirname.startsWith(relPrefix) ? dirname.slice(2) : dirname;
const isProjectRootDir = !dirname || dirname === '.';
if (isProjectRootDir) {
rootDockerignore = dockerignoreStats;
return false;
}
if (multiDockerignore) {
for (const serviceDir of serviceDirs) {
if (serviceDir === dirname) {
return false;
}
}
}
return true;
});
const msg = [];
let logFunc = logger.logWarn;
if (unusedFiles.length) {
msg.push('The following .dockerignore file(s) will not be used:', ...unusedFiles.map((fileStats) => `* ${fileStats.filePath}`));
if (multiDockerignore) {
msg.push((0, lazy_1.stripIndent) `
When --multi-dockerignore (-m) is used, only .dockerignore files at the
root of each service's build context (in a microservices/multicontainer
fleet), plus a .dockerignore file at the overall project root, are used.
See "balena help ${Logger.command}" for more details.`);
}
else {
msg.push((0, lazy_1.stripIndent) `
By default, only one .dockerignore file at the source folder (project
root) is used. Microservices (multicontainer) fleets may use a separate
.dockerignore file for each service with the --multi-dockerignore (-m)
option. See "balena help ${Logger.command}" for more details.`);
}
}
else if (multiDockerignore) {
logFunc = logger.logInfo;
if (serviceDirs.length && rootDockerignore) {
msg.push((0, lazy_1.stripIndent) `
The --multi-dockerignore option is being used, and a .dockerignore file was
found at the project source (root) directory. Note that this file will not
be used to filter service subdirectories. See "balena help ${Logger.command}".`);
}
else if (serviceDirs.length === 0) {
msg.push((0, lazy_1.stripIndent) `
The --multi-dockerignore (-m) option was specified, but it has no effect for
single-container (non-microservices) fleets. Only one .dockerignore file at the
project source (root) directory, if any, is used. See "balena help ${Logger.command}".`);
}
}
if (msg.length) {
const { warnify } = require('./messages');
logFunc.call(logger, ' \n' + warnify(msg.join('\n'), ''));
}
}
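/**
 * Throw an ExpectedError if the project's build metadata declares
 * 'build-secrets' but the target daemon is standard Docker rather than
 * balenaEngine, which the build secrets feature requires.
 */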
async function checkBuildSecretsRequirements(docker, sourceDir) {
const [metaObj, metaFilename] = await loadBuildMetadata(sourceDir);
if (metaObj && !_.isEmpty(metaObj['build-secrets'])) {
const dockerUtils = await Promise.resolve().then(() => require('./docker'));
const isBalenaEngine = await dockerUtils.isBalenaEngine(docker);
if (!isBalenaEngine) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
The "build secrets" feature currently requires balenaEngine, but a standard Docker
daemon was detected. Please use command-line options to specify the hostname and
port number (or socket path) of a balenaEngine daemon, running on a balena device
or a virtual machine with balenaOS. If the build secrets feature is not required,
comment out or delete the 'build-secrets' entry in the file:
"${metaFilename}"
`);
}
}
}
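/**
 * Load private registry secrets from the given file, or fall back to
 * 'secrets.{yml,yaml,json}' in the balena data directory. Returns an
 * empty object when no secrets file is found.
 */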
async function getRegistrySecrets(sdk, inputFilename) {
if (inputFilename != null) {
return await parseRegistrySecrets(inputFilename);
}
const directory = await sdk.settings.get('dataDirectory');
const potentialPaths = [
path.join(directory, 'secrets.yml'),
path.join(directory, 'secrets.yaml'),
path.join(directory, 'secrets.json'),
];
for (const potentialPath of potentialPaths) {
if (await (0, which_1.exists)(potentialPath)) {
return await parseRegistrySecrets(potentialPath);
}
}
return {};
}
async function parseRegistrySecrets(secretsFilename) {
try {
let isYaml = false;
if (/.+\.ya?ml$/i.test(secretsFilename)) {
isYaml = true;
}
else if (!/.+\.json$/i.test(secretsFilename)) {
throw new errors_1.ExpectedError('Filename must end with .json, .yml or .yaml');
}
const raw = (await fs_1.promises.readFile(secretsFilename)).toString();
const multiBuild = await Promise.resolve().then(() => require('@balena/compose/dist/multibuild'));
const registrySecrets = new multiBuild.RegistrySecretValidator().validateRegistrySecrets(isYaml ? yaml.load(raw) : JSON.parse(raw));
multiBuild.addCanonicalDockerHubEntry(registrySecrets);
return registrySecrets;
}
catch (error) {
throw new errors_1.ExpectedError(`Error validating registry secrets file "${secretsFilename}":\n${error.message}`);
}
}
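/**
 * Split the project tar stream into per-service build tasks (an image
 * pull for external images, a build otherwise) and resolve each task's
 * project type for the target device type and architecture.
 */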
async function makeBuildTasks(composition, tarStream, deviceInfo, logger, projectName, releaseHash = 'unavailable', preprocessHook) {
const multiBuild = await Promise.resolve().then(() => require('@balena/compose/dist/multibuild'));
const buildTasks = await multiBuild.splitBuildStream(composition, tarStream);
logger.logDebug('Found build tasks:');
_.each(buildTasks, (task) => {
let infoStr;
if (task.external) {
infoStr = `image pull [${task.imageName}]`;
}
else {
infoStr = `build [${task.context}]`;
}
logger.logDebug(` ${task.serviceName}: ${infoStr}`);
task.logger = logger.getAdapter();
});
logger.logDebug(`Resolving services with [${deviceInfo.deviceType}|${deviceInfo.arch}]`);
await performResolution(buildTasks, deviceInfo, projectName, releaseHash, preprocessHook);
logger.logDebug('Found project types:');
_.each(buildTasks, (task) => {
if (task.external) {
logger.logDebug(` ${task.serviceName}: External image`);
}
else {
logger.logDebug(` ${task.serviceName}: ${task.projectType}`);
}
});
return buildTasks;
}
async function performResolution(tasks, deviceInfo, appName, releaseHash, preprocessHook) {
const multiBuild = await Promise.resolve().then(() => require('@balena/compose/dist/multibuild'));
const resolveListeners = {};
const resolvePromise = new Promise((_resolve, reject) => {
resolveListeners.error = [reject];
});
const buildTasks = multiBuild.performResolution(tasks, deviceInfo.arch, deviceInfo.deviceType, resolveListeners, {
BALENA_RELEASE_HASH: releaseHash,
BALENA_APP_NAME: appName,
}, preprocessHook);
await Promise.race([resolvePromise, resolveTasks(buildTasks)]);
return buildTasks;
}
async function resolveTasks(buildTasks) {
const { cloneTarStream } = await Promise.resolve().then(() => require('tar-utils'));
for (const buildTask of buildTasks) {
if (!buildTask.buildStream) {
continue;
}
let error;
try {
buildTask.buildStream = await cloneTarStream(buildTask.buildStream);
}
catch (e) {
error = e;
}
if (error || (!buildTask.external && !buildTask.resolved)) {
const cause = error ? `${error}\n` : '';
throw new errors_1.ExpectedError(`${cause}Project type for service "${buildTask.serviceName}" could not be determined. Missing a Dockerfile?`);
}
}
}
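/**
 * Validate that an alternative Dockerfile path is relative to, and
 * contained within, the project source folder and that the file exists.
 * Returns the validated path in POSIX form.
 */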
async function validateSpecifiedDockerfile(projectPath, dockerfilePath) {
const { contains, toNativePath, toPosixPath } = (await Promise.resolve().then(() => require('@balena/compose/dist/multibuild'))).PathUtils;
const nativeProjectPath = path.normalize(projectPath);
const nativeDockerfilePath = path.normalize(toNativePath(dockerfilePath));
if (path.isAbsolute(nativeDockerfilePath)) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
Error: the specified Dockerfile cannot be an absolute path. The path must be
relative to, and not a parent folder of, the project's source folder.
Specified dockerfile: "${nativeDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
`);
}
if (nativeDockerfilePath.startsWith('..')) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
Error: the specified Dockerfile cannot be in a parent folder of the project's
source folder. Note that the path should be relative to the project's source
folder, not the current folder.
Specified dockerfile: "${nativeDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
`);
}
const fullDockerfilePath = path.join(nativeProjectPath, nativeDockerfilePath);
if (!(await (0, which_1.exists)(fullDockerfilePath))) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
Error: specified Dockerfile not found:
Specified dockerfile: "${fullDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
Note that the specified Dockerfile path should be relative to the source folder.
`);
}
if (!contains(nativeProjectPath, fullDockerfilePath)) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
Error: the specified Dockerfile must be in a subfolder of the source folder:
Specified dockerfile: "${fullDockerfilePath}"
Project's source folder: "${nativeProjectPath}"
`);
}
return toPosixPath(nativeDockerfilePath);
}
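/**
 * Validate the project source folder: it must be an accessible directory
 * containing a Dockerfile, docker-compose file or package.json (unless an
 * explicit Dockerfile path is given), and a composition file in the
 * parent folder triggers an error unless --noparent-check is used.
 * Also loads any registry secrets.
 */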
async function validateProjectDirectory(sdk, opts) {
if (!(await (0, which_1.exists)(opts.projectPath)) ||
!(await fs_1.promises.stat(opts.projectPath)).isDirectory()) {
throw new errors_1.ExpectedError(`Could not access source folder: "${opts.projectPath}"`);
}
const result = {
dockerfilePath: opts.dockerfilePath || '',
registrySecrets: {},
};
if (opts.dockerfilePath) {
result.dockerfilePath = await validateSpecifiedDockerfile(opts.projectPath, opts.dockerfilePath);
}
else {
const files = await fs_1.promises.readdir(opts.projectPath);
const projectMatch = (file) => /^(Dockerfile|Dockerfile\.\S+|docker-compose.ya?ml|package.json)$/.test(file);
if (!_.some(files, projectMatch)) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `
Error: no "Dockerfile[.*]", "docker-compose.yml" or "package.json" file
found in source folder "${opts.projectPath}"
`);
}
if (!opts.noParentCheck) {
const checkCompose = async (folder) => {
return _.some(await Promise.all(compositionFileNames.map((filename) => (0, which_1.exists)(path.join(folder, filename)))));
};
const [hasCompose, hasParentCompose] = await Promise.all([
checkCompose(opts.projectPath),
checkCompose(path.join(opts.projectPath, '..')),
]);
if (!hasCompose && hasParentCompose) {
const msg = (0, lazy_1.stripIndent) `
"docker-compose.y[a]ml" file found in parent directory: please check that
the correct source folder was specified. (Suppress with '--noparent-check'.)`;
throw new errors_1.ExpectedError(`Error: ${msg}`);
}
}
}
result.registrySecrets = await getRegistrySecrets(sdk, opts.registrySecretsPath);
return result;
}
async function getTokenForPreviousRepos(logger, appId, apiEndpoint, taggedImages) {
logger.logDebug('Authorizing push...');
const { authorizePush, getPreviousRepos } = await Promise.resolve().then(() => require('./compose'));
const sdk = (0, lazy_1.getBalenaSdk)();
const previousRepos = await getPreviousRepos(sdk, logger, appId);
const token = await authorizePush(sdk, apiEndpoint, taggedImages[0].registry, _.map(taggedImages, 'repo'), previousRepos);
return token;
}
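/**
 * Push each built image to the registry (retrying up to 3 times) while
 * recording its inspected size, content digest, build log and metadata on
 * the corresponding service image model. The afterEach callback runs for
 * every image, whether its push succeeded or failed.
 */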
async function pushAndUpdateServiceImages(docker, token, images, afterEach) {
const { DockerProgress } = await Promise.resolve().then(() => require('docker-progress'));
const { retry } = await Promise.resolve().then(() => require('./helpers'));
const { pushProgressRenderer } = await Promise.resolve().then(() => require('./compose'));
const tty = (await Promise.resolve().then(() => require('./tty')))(process.stdout);
const opts = { authconfig: { registrytoken: token } };
const progress = new DockerProgress({ docker });
const renderer = pushProgressRenderer(tty, (0, lazy_1.getChalk)().blue('[Push]') + ' ');
const reporters = progress.aggregateProgress(images.length, renderer);
const pushImage = async (localImage, index) => {
try {
const imgName = localImage.name || '';
const imageDigest = await retry({
func: () => progress.push(imgName, reporters[index], opts),
maxAttempts: 3,
label: imgName,
initialDelayMs: 2000,
backoffScaler: 1.4,
});
if (!imageDigest) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `\
Unable to extract image digest (content hash) from image upload progress stream for image:
${imgName}`);
}
return imageDigest;
}
finally {
renderer.end();
}
};
const inspectAndPushImage = async ({ serviceImage, localImage, props, logs }, index) => {
try {
const [imgInfo, imgDigest] = await Promise.all([
localImage.inspect(),
pushImage(localImage, index),
]);
serviceImage.image_size = `${imgInfo.Size}`;
serviceImage.content_hash = imgDigest;
serviceImage.build_log = logs;
serviceImage.dockerfile = props.dockerfile;
serviceImage.project_type = props.projectType;
if (props.startTime) {
serviceImage.start_timestamp = props.startTime;
}
if (props.endTime) {
serviceImage.end_timestamp = props.endTime;
}
serviceImage.push_timestamp = new Date();
serviceImage.status = 'success';
}
catch (error) {
serviceImage.error_message = '' + error;
serviceImage.status = 'failed';
throw error;
}
finally {
await afterEach(serviceImage, props);
}
};
tty.hideCursor();
try {
await Promise.all(images.map(inspectAndPushImage));
}
finally {
tty.showCursor();
}
}
async function pushServiceImages(docker, logger, pineClient, taggedImages, token, skipLogUpload) {
const releaseMod = await Promise.resolve().then(() => require('@balena/compose/dist/release'));
logger.logInfo('Pushing images to registry...');
await pushAndUpdateServiceImages(docker, token, taggedImages, async function (serviceImage) {
logger.logDebug(`Saving image ${serviceImage.is_stored_at__image_location}`);
if (skipLogUpload) {
delete serviceImage.build_log;
}
await releaseMod.updateImage(pineClient, serviceImage.id, _.pick(serviceImage, [
'end_timestamp',
'project_type',
'error_message',
'build_log',
'push_timestamp',
'status',
'content_hash',
'dockerfile',
'image_size',
]));
});
}
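/**
 * Create a release, tag the built images and push them to the registry,
 * then finalize the release record with its status and end timestamp
 * regardless of whether the push succeeded. A 'balena.yml' contract at
 * the project root, if present, supplies the release version.
 */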
async function deployProject(docker, sdk, logger, composition, images, appId, skipLogUpload, projectPath, isDraft) {
const releaseMod = await Promise.resolve().then(() => require('@balena/compose/dist/release'));
const { createRelease, tagServiceImages } = await Promise.resolve().then(() => require('./compose'));
const tty = (await Promise.resolve().then(() => require('./tty')))(process.stdout);
const prefix = (0, lazy_1.getChalk)().cyan('[Info]') + ' ';
const spinner = createSpinner();
const contractPath = path.join(projectPath, 'balena.yml');
const contract = await getContractContent(contractPath);
if ((contract === null || contract === void 0 ? void 0 : contract.version) && !semver.valid(contract.version)) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `\
Error: the version field in "${contractPath}"
is not a valid semver`);
}
const apiEndpoint = await sdk.settings.get('apiUrl');
const $release = await runSpinner(tty, spinner, `${prefix}Creating release...`, () => createRelease(sdk, logger, appId, composition, isDraft, contract === null || contract === void 0 ? void 0 : contract.version, contract));
const { client: pineClient, release, serviceImages } = $release;
try {
logger.logDebug('Tagging images...');
const taggedImages = await tagServiceImages(docker, images, serviceImages);
try {
const { awaitInterruptibleTask } = await Promise.resolve().then(() => require('./helpers'));
await awaitInterruptibleTask(async () => {
const token = await getTokenForPreviousRepos(logger, appId, apiEndpoint, taggedImages);
await pushServiceImages(docker, logger, pineClient, taggedImages, token, skipLogUpload);
});
release.status = 'success';
}
catch (err) {
release.status = 'failed';
throw err;
}
finally {
logger.logDebug('Untagging images...');
await Promise.all(taggedImages.map(({ localImage }) => localImage.remove()));
}
}
finally {
await runSpinner(tty, spinner, `${prefix}Saving release...`, async () => {
release.end_timestamp = new Date();
if (release.id != null) {
await releaseMod.updateRelease(pineClient, release.id, {
status: release.status,
end_timestamp: release.end_timestamp,
});
}
});
}
return release;
}
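/** Return a function that yields successive frames of a '|/-\' spinner. */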
function createSpinner() {
const chars = '|/-\\';
let index = 0;
return () => chars[index++ % chars.length];
}
async function runSpinner(tty, spinner, msg, fn) {
const runloop = createRunLoop(function () {
tty.clearLine();
tty.writeLine(`${msg} ${spinner()}`);
tty.cursorUp();
});
runloop.onEnd = function () {
tty.clearLine();
tty.writeLine(msg);
};
try {
return await fn();
}
finally {
runloop.end();
}
}
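/**
 * Invoke 'tick' at ten frames per second until end() is called, then run
 * the runloop's onEnd callback.
 */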
function createRunLoop(tick) {
const timerId = setInterval(tick, 1000 / 10);
const runloop = {
onEnd() {
},
end() {
clearInterval(timerId);
return runloop.onEnd();
},
};
return runloop;
}
async function getContractContent(filePath) {
let fileContentAsString;
try {
fileContentAsString = await fs_1.promises.readFile(filePath, 'utf8');
}
catch (e) {
if (e.code === 'ENOENT') {
return;
}
throw e;
}
let asJson;
try {
asJson = yaml.load(fileContentAsString);
}
catch (err) {
throw new errors_1.ExpectedError(`Error parsing file "${filePath}":\n ${err.message}`);
}
if (!isContract(asJson)) {
throw new errors_1.ExpectedError((0, lazy_1.stripIndent) `Error: application contract in '${filePath}' needs to
define a top level "type" field with an allowed application type.
Allowed application types are: ${allowedContractTypes.join(', ')}`);
}
return asJson;
}
function isContract(obj) {
return (obj === null || obj === void 0 ? void 0 : obj.type) && allowedContractTypes.includes(obj.type);
}
function createLogStream(input) {
const split = require('split');
const stripAnsi = require('strip-ansi-stream');
return input.pipe(stripAnsi()).pipe(split());
}
function dropEmptyLinesStream() {
const through = require('through2');
return through(function (data, _enc, cb) {
const str = data.toString('utf-8');
if (str.trim()) {
this.push(str);
}
return cb();
});
}
function buildLogCapture(objectMode, buffer) {
const through = require('through2');
return through({ objectMode }, function (data, _enc, cb) {
if (data.error) {
buffer.push(`${data.error}`);
}
else if (data.progress && data.status) {
buffer.push(`${data.progress}% ${data.status}`);
}
else if (data.status) {
buffer.push(`${data.status}`);
}
else {
buffer.push(data);
}
return cb(null, data);
});
}
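/**
 * Transform raw Docker build output lines into { status, progress }
 * objects, deriving a percentage from 'Step X/Y' markers unless inline
 * logging is enabled.
 */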
function buildProgressAdapter(inline) {
const through = require('through2');
const stepRegex = /^\s*Step\s+(\d+)\/(\d+)\s*: (.+)$/;
let step = '';
let numSteps = '';
let progress;
return through({ objectMode: true }, function (str, _enc, cb) {
if (str == null) {
return cb(null, str);
}
if (inline) {
return cb(null, { status: str });
}
if (!/^Successfully tagged /.test(str)) {
const match = stepRegex.exec(str);
if (match) {
step = match[1];
numSteps || (numSteps = match[2]);
str = match[3];
}
if (step) {
str = `Step ${step}/${numSteps}: ${str}`;
progress = Math.floor((parseInt(step, 10) * 100) / parseInt(numSteps, 10));
}
}
return cb(null, { status: str, progress });
});
}
function pullProgressAdapter(outStream) {
return function ({ status, id, percentage, error, errorDetail, }) {
var _a;
id || (id = '');
status || (status = '');
const isTotal = id && id.toLowerCase() === 'total';
if (status) {
status = status.replace(/^Status: /, '');
}
else if (isTotal && typeof percentage === 'number') {
status = `Pull progress: ${percentage}%`;
}
if (id && status && !isTotal) {
status = `${id}: ${status}`;
}
if (percentage === 100) {
percentage = undefined;
}
return outStream.write({
status,
progress: percentage,
error: (_a = errorDetail === null || errorDetail === void 0 ? void 0 : errorDetail.message) !== null && _a !== void 0 ? _a : error,
});
};
}
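/**
 * Truncate a string to at most 'len' characters, cutting at the last
 * newline within the limit so that no partial line is kept.
 */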
function truncateString(str, len) {
if (str.length < len) {
return str;
}
str = str.slice(0, len);
return str.slice(0, str.lastIndexOf('\n'));
}
exports.composeCliFlags = {
emulated: core_1.Flags.boolean({
description: 'Use QEMU for ARM architecture emulation during the image build',
char: 'e',
}),
dockerfile: core_1.Flags.string({
description: 'Alternative Dockerfile name/path, relative to the source folder',
}),
nologs: core_1.Flags.boolean({
description: 'Hide the image build log output (produce less verbose output)',
}),
'multi-dockerignore': core_1.Flags.boolean({
description: 'Have each service use its own .dockerignore file. See "balena help build".',
char: 'm',
}),
'noparent-check': core_1.Flags.boolean({
description: "Disable project validation check of 'docker-compose.yml' file in parent folder",
}),
'registry-secrets': core_1.Flags.string({
description: 'Path to a YAML or JSON file with passwords for a private Docker registry',
char: 'R',
}),
'noconvert-eol': core_1.Flags.boolean({
description: "Don't convert line endings from CRLF (Windows format) to LF (Unix format).",
}),
projectName: core_1.Flags.string({
description: (0, lazy_1.stripIndent) `\
Name prefix for locally built images. This is the 'projectName' portion
in 'projectName_serviceName:tag'. The default is the directory name.`,
char: 'n',
}),
};
//# sourceMappingURL=compose_ts.js.map