// @magda/docker-utils — MAGDA Docker Utilities
// (npm registry page header converted to a comment so the file parses as JavaScript)
#!/usr/bin/env node
// ../../scripts/create-docker-context-for-node-component.js
import childProcess from "child_process";
import fse from "fs-extra";
import path from "path";
import process2 from "process";
import yargs from "yargs";
import _ from "lodash";
import isSubDir from "is-subdir";
// ../../scripts/docker-util.js
// Returns the list of image versions to tag with. An explicit `version`
// array wins; otherwise fall back to the package version from npm config
// (non-local builds only), or "latest".
function getVersions(local, version) {
    if (version) {
        return version;
    }
    const npmVersion = process.env.npm_package_version;
    const fallback = !local && npmVersion ? npmVersion : "latest";
    return [fallback];
}
// Resolves the docker image name: explicit argument first, then the
// config.docker.name npm config value, then "magda-" + the unscoped npm
// package name, and finally a placeholder.
function getName(name) {
    if (typeof name === "string" && name) {
        return name;
    }
    const configuredName = process.env.npm_package_config_docker_name;
    if (configuredName) {
        return configuredName;
    }
    const pkgName = process.env.npm_package_name;
    if (pkgName) {
        return "magda-" + pkgName.split("/")[1];
    }
    return "UnnamedImage";
}
// Builds the list of full image tags. When `tag` is "auto", tags are
// derived from the repository/name/version configuration (one tag per
// version); otherwise the explicit tag (if any) is used verbatim.
function getTags(tag, local, repository, version, name) {
    if (tag !== "auto") {
        return tag ? [tag] : [];
    }
    const prefix = getRepository(local, repository);
    const imageName = getName(name);
    return getVersions(local, version).map(
        (versionTag) => prefix + imageName + ":" + versionTag
    );
}
// Returns the repository prefix (with a trailing "/") for image tags.
// Falls back to the local dev registry when --local is set, otherwise
// the empty string (i.e. dockerhub).
function getRepository(local, repository) {
    if (repository) {
        return repository + "/";
    }
    return local ? "localhost:5000/" : "";
}
// ../../node_modules/@magda/esm-utils/dist/esmUtils.js
import { fileURLToPath } from "url";
import { dirname } from "path";
import { createRequire } from "module";
// Captures the V8 CallSite objects for the current stack, excluding the
// frame for callsites() itself. Temporarily overrides the
// Error.prepareStackTrace hook; the previous hook is always restored in
// the finally block, even if stack capture throws.
function callsites() {
    const _prepareStackTrace = Error.prepareStackTrace;
    try {
        let result = [];
        Error.prepareStackTrace = (_2, callSites) => {
            // Drop the first frame (this function) before capturing.
            const callSitesWithoutCurrent = callSites.slice(1);
            result = callSitesWithoutCurrent;
            return callSitesWithoutCurrent;
        };
        // Accessing .stack lazily triggers prepareStackTrace, which
        // populates `result` as a side effect.
        new Error().stack;
        return result;
    } finally {
        Error.prepareStackTrace = _prepareStackTrace;
    }
}
// Finds the call site of this function's (indirect) caller. Collects the
// first frame seen for each distinct file name (most recently seen file
// first, via unshift) and returns callers[depth] as soon as a frame with
// both a type name and a file name is found. Returns undefined when no
// such frame exists.
function callerCallsite({ depth = 0 } = {}) {
    const callers = [];
    const callerFileSet = /* @__PURE__ */ new Set();
    for (const callsite of callsites()) {
        const fileName = callsite.getFileName();
        // NOTE(review): a non-null type name appears to be used as a proxy
        // for "frame invoked on a receiver" — confirm against upstream
        // caller-callsite package semantics.
        const hasReceiver = callsite.getTypeName() !== null && fileName !== null;
        if (!callerFileSet.has(fileName)) {
            callerFileSet.add(fileName);
            callers.unshift(callsite);
        }
        if (hasReceiver) {
            return callers[depth];
        }
    }
}
// Returns the file name (path or file:// URL) of the call site `depth`
// levels up, or undefined if it cannot be determined.
function callerpath({ depth = 0 } = {}) {
    return callerCallsite({ depth })?.getFileName();
}
// CommonJS-style require() usable from ESM; module ids are resolved
// relative to the file that invoked require2 (depth 1 = our caller).
function require2(id) {
    return createRequire(callerpath({ depth: 1 }))(id);
}
// Emulates CommonJS __dirname for the *calling* module (ESM has no
// __dirname); depth 1 resolves the caller's file URL to a directory.
var __dirname = () => dirname(fileURLToPath(callerpath({ depth: 1 })));
// ../../scripts/create-docker-context-for-node-component.js
// Directory containing this script (resolved via the call stack above).
var __dirname2 = __dirname();
// Memoizes the "dependencies" keys per package.json path (see
// getPackageDependencies).
var packageDependencyDataCache = {};
// CLI options. Note `.version(false)` disables yargs' built-in --version
// flag so it does not shadow our own "version" option below.
var argv = yargs.options({
    build: {
        description: "Pipe the Docker context straight to Docker.",
        type: "boolean",
        default: false
    },
    tag: {
        description: 'The tag to pass to "docker build". This parameter is only used if --build is specified. If the value of this parameter is `auto`, a tag name is automatically created from NPM configuration.',
        type: "string",
        default: "auto"
    },
    repository: {
        description: "The repository to use in auto tag generation. Will default to '', i.e. dockerhub unless --local is set. Requires --tag=auto",
        type: "string",
        default: process2.env.MAGDA_DOCKER_REPOSITORY
    },
    name: {
        description: "The package name to use in auto tag generation. Will default to ''. Used to override the docker nanme config in package.json during the auto tagging. Requires --tag=auto",
        type: "string",
        default: process2.env.MAGDA_DOCKER_NAME
    },
    version: {
        description: "The version(s) to use in auto tag generation. Will default to the current version in package.json. Requires --tag=auto",
        type: "string",
        array: true,
        default: process2.env.MAGDA_DOCKER_VERSION
    },
    output: {
        description: "The output path and filename for the Docker context .tar file.",
        type: "string"
    },
    local: {
        description: "Build for a local Kubernetes container registry. This parameter is only used if --build is specified.",
        type: "boolean",
        default: false
    },
    push: {
        description: "Push the build image to the docker registry. This parameter is only used if --build is specified.",
        type: "boolean",
        default: false
    },
    platform: {
        description: "A list of platform that the docker image build should target. Specify this value will enable multi-arch image build.",
        type: "string"
    },
    noCache: {
        description: "Disable the cache during the docker image build.",
        type: "boolean",
        default: false
    },
    cacheFromVersion: {
        description: "Version to cache from when building, using the --cache-from field in docker. Will use the same repository and name. Using this options causes the image to be pulled before build.",
        type: "string"
    }
}).version(false).array("version").help().argv;
// --- Argument validation: fail fast before doing any filesystem work. ---
// FIX: usage errors now go to stderr (console.error) rather than stdout,
// matching the non-zero exit code.
if (!argv.build && !argv.output) {
    console.error("Either --build or --output <filename> must be specified.");
    process2.exit(1);
}
if (argv.platform && !argv.push) {
    // buildx multi-arch images cannot be loaded locally; they must be pushed.
    console.error(
        "When --platform is specified, --push must be specified as well as multi-arch image can only be pushed to remote registry."
    );
    process2.exit(1);
}
if (argv.noCache && argv.cacheFromVersion) {
    // The two options are mutually exclusive by definition.
    console.error("When --noCache=true, --cacheFromVersion can't be specified.");
    process2.exit(1);
}
// Build the docker context in a fresh temp directory (sibling of this
// script's parent dir); the component's files go under ./component.
var componentSrcDir = path.resolve(process2.cwd());
var dockerContextDir = fse.mkdtempSync(
    path.resolve(__dirname2, "..", "docker-context-")
);
var componentDestDir = path.resolve(dockerContextDir, "component");
fse.emptyDirSync(dockerContextDir);
fse.ensureDirSync(componentDestDir);
preparePackage(componentSrcDir, componentDestDir);
// macOS ships BSD tar; GNU tar (gtar) is used there instead.
var tar = process2.platform === "darwin" ? "gtar" : "tar";
var env = Object.assign({}, process2.env);
var extraParameters = [];
// NOTE(review): ConEmuANSI looks like a ConEmu (Windows console) flag;
// its ANSI handling is disabled and -cur_console:i passed to children —
// confirm against ConEmu documentation.
if (env.ConEmuANSI === "ON") {
    env.ConEmuANSI = "OFF";
    extraParameters.push("-cur_console:i");
}
// Rewrite the Dockerfile's FROM line for the target repository/version.
updateDockerFile(componentSrcDir, componentDestDir);
// Main action: either stream the context into `docker build -` (--build)
// or write it out as a .tar.gz file (--output).
if (argv.build) {
    // Image to seed the build cache from (--cache-from); falsy when unused.
    const cacheFromImage =
        argv.cacheFromVersion &&
        getRepository(argv.local, argv.repository) +
            getName(argv.name) +
            ":" +
            argv.cacheFromVersion;
    if (cacheFromImage) {
        // Best-effort pull: a failure just means no cache image is available.
        const dockerPullProcess = childProcess.spawnSync(
            "docker",
            [...extraParameters, "pull", cacheFromImage],
            {
                stdio: "inherit",
                env
            }
        );
        wrapConsoleOutput(dockerPullProcess);
    }
    // Stream the context dir as a gzipped tarball to stdout ("-").
    const tarProcess = childProcess.spawn(
        tar,
        [...extraParameters, "--dereference", "-czf", "-", "*"],
        {
            cwd: dockerContextDir,
            stdio: ["inherit", "pipe", "inherit"],
            env,
            shell: true
        }
    );
    const tags = getTags(
        argv.tag,
        argv.local,
        argv.repository,
        argv.version,
        argv.name
    );
    // Flatten tags into repeated ["-t", tag] arguments.
    const tagArgs = tags.flatMap((tag) => ["-t", tag]);
    const cacheFromArgs = cacheFromImage ? ["--cache-from", cacheFromImage] : [];
    const dockerProcess = childProcess.spawn(
        "docker",
        [
            ...extraParameters,
            // Multi-arch builds go through buildx and push from the build.
            ...(argv.platform ? ["buildx"] : []),
            "build",
            ...tagArgs,
            ...cacheFromArgs,
            ...(argv.noCache ? ["--no-cache"] : []),
            ...(argv.platform ? ["--platform", argv.platform, "--push"] : []),
            "-f",
            `./component/Dockerfile`,
            "-"
        ],
        {
            stdio: ["pipe", "inherit", "inherit"],
            env
        }
    );
    wrapConsoleOutput(dockerProcess);
    dockerProcess.on("close", (code) => {
        fse.removeSync(dockerContextDir);
        // buildx (--platform) already pushed during the build; otherwise
        // push each tag here, stopping at the first failure.
        if (code === 0 && argv.push && !argv.platform) {
            if (tags.length === 0) {
                console.error("Can not push an image without a tag.");
                process2.exit(1);
            }
            tags.every((tag) => {
                const pushProcess = childProcess.spawnSync("docker", ["push", tag], {
                    stdio: "inherit"
                });
                code = pushProcess.status;
                return code === 0;
            });
        }
        process2.exit(code);
    });
    tarProcess.on("close", (code) => {
        // tar is done writing; close docker's stdin so the build proceeds.
        dockerProcess.stdin.end();
    });
    tarProcess.stdout.on("data", (data) => {
        dockerProcess.stdin.write(data);
    });
} else if (argv.output) {
    const outputPath = path.resolve(process2.cwd(), argv.output);
    // 420 === 0o644 file mode.
    const outputTar = fse.openSync(outputPath, "w", 420);
    const tarProcess = childProcess.spawn(
        tar,
        ["--dereference", "-czf", "-", "*"],
        {
            cwd: dockerContextDir,
            stdio: ["inherit", outputTar, "inherit"],
            env,
            shell: true
        }
    );
    tarProcess.on("close", (code) => {
        fse.closeSync(outputTar);
        // BUG FIX: the original logged `tarProcess.status`, which is only
        // set by spawnSync — for async spawn it is always undefined. Use
        // the real close code and report failures on stderr instead.
        if (code !== 0) {
            console.error(`tar exited with code ${code}`);
        }
        fse.removeSync(dockerContextDir);
    });
}
// Copies the component's Dockerfile into the context, rewriting the FROM
// line so magda base images point at the configured repository and (when
// known) the configured version tag.
function updateDockerFile(sourceDir, destDir) {
    const versions = getVersions(argv.local, argv.version);
    const repo = getRepository(argv.local, argv.repository);
    const contents = fse.readFileSync(
        path.resolve(sourceDir, "Dockerfile"),
        "utf-8"
    );
    // Keep the original tag ($2) when no version is configured.
    const tagPart = versions[0] ? ":" + versions[0] : "$2";
    const rewritten = contents.replace(
        /FROM .*(magda-[^:\s\/]+)(:[^\s]+)/,
        "FROM " + repo + "$1" + tagPart
    );
    fse.writeFileSync(path.resolve(destDir, "Dockerfile"), rewritten, "utf-8");
}
// Populates the docker context with the package's files (symlinks where
// possible; tar --dereference resolves them later). The file list comes
// from config.docker.include in package.json: a space-separated list, or
// "*" for everything. node_modules is handled specially: only production
// dependencies are linked in.
// FIX: removed dead code — an `env2` object with NODE_ENV=production was
// created but never used or passed anywhere.
function preparePackage(packageDir, destDir) {
    const packageJson = require2(path.join(packageDir, "package.json"));
    const dockerIncludesFromPackageJson =
        packageJson.config &&
        packageJson.config.docker &&
        packageJson.config.docker.include;
    let dockerIncludes;
    if (!dockerIncludesFromPackageJson) {
        console.log(
            `WARNING: Package ${packageDir} does not have a config.docker.include key in package.json, so all of its files will be included in the docker image.`
        );
        dockerIncludes = fse.readdirSync(packageDir);
    } else if (dockerIncludesFromPackageJson.trim() === "*") {
        dockerIncludes = fse.readdirSync(packageDir);
    } else {
        if (dockerIncludesFromPackageJson.indexOf("*") >= 0) {
            // Only a bare "*" is recognised; partial globs are unsupported.
            throw new Error(
                "Sorry, wildcards are not currently supported in config.docker.include."
            );
        }
        dockerIncludes = dockerIncludesFromPackageJson
            .split(" ")
            .filter((include) => include.length > 0);
    }
    // The Dockerfile itself is written separately (with a rewritten FROM line).
    dockerIncludes
        .filter((include) => include !== "Dockerfile")
        .forEach(function (include) {
            const src = path.resolve(packageDir, include);
            const dest = path.resolve(destDir, include);
            if (include === "node_modules") {
                fse.ensureDirSync(dest);
                const productionPackages = _.uniqBy(
                    getPackageList(packageDir, path.resolve(packageDir, "..")),
                    (pkg) => pkg.path
                );
                prepareNodeModules(src, dest, productionPackages);
                return;
            }
            try {
                // Prefer a cheap symlink; fall back to copying on failure
                // (e.g. missing symlink permissions on Windows).
                const type = fse.statSync(src).isFile() ? "file" : "junction";
                fse.ensureSymlinkSync(src, dest, type);
            } catch (e) {
                fse.copySync(src, dest);
            }
        });
}
// Symlinks each production dependency into the context's node_modules,
// preserving the relative layout. An already-existing link (EEXIST) is
// silently accepted; any other error is fatal.
function prepareNodeModules(packageDir, destDir, productionPackages) {
    for (const pkg of productionPackages) {
        const relPath = path.relative(packageDir, pkg.path);
        const linkTarget = path.resolve(packageDir, relPath);
        const linkPath = path.resolve(destDir, relPath);
        try {
            const linkType = fse.lstatSync(linkTarget).isFile()
                ? "file"
                : "junction";
            fse.ensureSymlinkSync(linkTarget, linkPath, linkType);
        } catch (err) {
            if (err?.code !== "EEXIST") {
                throw err;
            }
        }
    }
}
function getPackageList(packagePath, packageSearchRoot, resolvedSoFar = {}) {
const dependencies = getPackageDependencies(packagePath);
const result = [];
if (!dependencies || !dependencies.length) {
return result;
}
dependencies.forEach(function(dependencyName) {
const dependencyNamePath = dependencyName.replace(/\//g, path.sep);
let currentBaseDir = packagePath;
let dependencyDir;
do {
dependencyDir = path.resolve(
currentBaseDir,
"node_modules",
dependencyNamePath
);
if (currentBaseDir === packageSearchRoot || isSubDir(currentBaseDir, packageSearchRoot)) {
break;
}
currentBaseDir = path.resolve(currentBaseDir, "..");
} while (!fse.existsSync(dependencyDir));
if (!fse.existsSync(dependencyDir)) {
throw new Error(
"Could not find path for " + dependencyName + " @ " + packagePath
);
}
if (!resolvedSoFar[dependencyDir]) {
result.push({ name: dependencyName, path: dependencyDir });
const childPackageResult = getPackageList(
dependencyDir,
packageSearchRoot,
{ ...resolvedSoFar, [dependencyDir]: true }
);
Array.prototype.push.apply(result, childPackageResult);
}
});
return result;
}
// Returns the names of the package's production dependencies, reading
// (and caching) them from <packagePath>/package.json.
function getPackageDependencies(packagePath) {
    const packageJsonPath = path.resolve(packagePath, "package.json");
    const cached = packageDependencyDataCache[packageJsonPath];
    if (cached) {
        return cached;
    }
    const { dependencies } = fse.readJSONSync(packageJsonPath);
    const names = dependencies ? Object.keys(dependencies) : [];
    packageDependencyDataCache[packageJsonPath] = names;
    return names;
}
// Forwards a child process's stdout/stderr data (when those streams
// exist) to our own console.log/console.error.
function wrapConsoleOutput(process3) {
    process3.stdout?.on("data", (chunk) => console.log(chunk.toString()));
    process3.stderr?.on("data", (chunk) => console.error(chunk.toString()));
}