@catladder/pipeline
Panter workflow for cloud CI/CD and DevOps
"use strict";
// TypeScript downlevel helpers emitted by the compiler (tslib-style __assign, __rest, __read, __spreadArray).
var __assign = this && this.__assign || function () {
  __assign = Object.assign || function (t) {
    for (var s, i = 1, n = arguments.length; i < n; i++) {
      s = arguments[i];
      for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
    }
    return t;
  };
  return __assign.apply(this, arguments);
};
var __rest = this && this.__rest || function (s, e) {
  var t = {};
  for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p];
  if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
    if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]];
  }
  return t;
};
var __read = this && this.__read || function (o, n) {
  var m = typeof Symbol === "function" && o[Symbol.iterator];
  if (!m) return o;
  var i = m.call(o),
    r,
    ar = [],
    e;
  try {
    while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
  } catch (error) {
    e = {
      error: error
    };
  } finally {
    try {
      if (r && !r.done && (m = i["return"])) m.call(i);
    } finally {
      if (e) throw e.error;
    }
  }
  return ar;
};
var __spreadArray = this && this.__spreadArray || function (to, from, pack) {
  if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
    if (ar || !(i in from)) {
      if (!ar) ar = Array.prototype.slice.call(from, 0, i);
      ar[i] = from[i];
    }
  }
  return to.concat(ar || Array.prototype.slice.call(from));
};
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getCloudRunJobsWithNames = exports.getJobCreateScripts = exports.getDeleteJobsScripts = void 0;
var getLabels_1 = require("../../../context/getLabels");
var utils_1 = require("../../../utils");
var createArgsString_1 = require("../utils/createArgsString");
var jobName_1 = require("../utils/jobName");
var common_1 = require("./common");
var constants_1 = require("./constants");
var volumes_1 = require("./volumes");
var getJobOrServiceArgs_1 = require("../utils/getJobOrServiceArgs");
// Builds the shell commands that tear down every configured Cloud Run job:
// each job's executions are deleted first, then the job itself.
var getDeleteJobsScripts = function (context) {
  var commonArgs = (0, common_1.getCommonCloudRunArgs)(context);
  var commonArgsString = (0, createArgsString_1.createArgsString)(commonArgs);
  var jobsWithNames = (0, exports.getCloudRunJobsWithNames)(context);
  return jobsWithNames.flatMap(function (_a) {
    var fullJobName = _a.fullJobName;
    return [
      // first delete all job executions; otherwise deleting the job can fail while one of them is still running
      "".concat((0, common_1.gcloudRunCmd)(), " jobs executions list ").concat(commonArgsString, " --job ").concat(fullJobName, " --format=\"value(name)\" | xargs -I {} ").concat((0, common_1.gcloudRunCmd)(), " jobs executions delete {} --quiet ").concat(commonArgsString),
      "".concat((0, common_1.gcloudRunCmd)(), " jobs delete ").concat(fullJobName, " ").concat(commonArgsString)
    ];
  });
};
exports.getDeleteJobsScripts = getDeleteJobsScripts;
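// For orientation only (hypothetical full job name "staging-app-migrate"; the gcloud
// command prefix and the common args come from gcloudRunCmd() and getCommonCloudRunArgs),
// each pair of strings returned above expands to bash along the lines of:
//   <gcloudRunCmd> jobs executions list <common args> --job staging-app-migrate --format="value(name)" \
//     | xargs -I {} <gcloudRunCmd> jobs executions delete {} --quiet <common args>
//   <gcloudRunCmd> jobs delete staging-app-migrate <common args>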
// Builds, for every configured Cloud Run job, a bash snippet that updates the job
// when it already exists and creates it otherwise.
var getJobCreateScripts = function (context) {
  return (0, exports.getCloudRunJobsWithNames)(context).map(function (_a, jobIndex) {
    var _b, _c, _d, _e, _f;
    var job = _a.job,
      fullJobName = _a.fullJobName;
    var commandArray = Array.isArray(job.command) ? job.command : job.command.split(" ");
    var _g = (0, common_1.getCommonDeployArgs)(context),
      commonImage = _g.image,
      project = _g.project,
      region = _g.region,
      deployArgs = __rest(_g, ["image", "project", "region"]);
    var commonDeployArgsString = createArgsString_1.createArgsString.apply(void 0, __spreadArray([__assign(__assign({
      command: "\"".concat(commandArray.join(","), "\""),
      args: (0, getJobOrServiceArgs_1.getCloudRunServiceOrJobArgsArg)(job.args),
      labels: "\"".concat((0, common_1.makeLabelString)((0, getLabels_1.getLabels)(context)), ",cloud-run-job-name=$current_job_name\""),
      image: "\"".concat((_b = job.image) !== null && _b !== void 0 ? _b : commonImage, "\""),
      project: project,
      region: region,
      cpu: job.cpu,
      memory: (_c = job.memory) !== null && _c !== void 0 ? _c : "512Mi",
      parallelism: (_d = job.parallelism) !== null && _d !== void 0 ? _d : 1,
      "task-timeout": (_e = job.timeout) !== null && _e !== void 0 ? _e : "10m",
      "env-vars-file": constants_1.ENV_VARS_FILENAME,
      "max-retries": (_f = job.maxRetries) !== null && _f !== void 0 ? _f : 0
    }, deployArgs), {
      // network
      "vpc-connector": job === null || job === void 0 ? void 0 : job.vpcConnector,
      "vpc-egress": job === null || job === void 0 ? void 0 : job.vpcEgress,
      network: job === null || job === void 0 ? void 0 : job.network,
      subnet: job === null || job === void 0 ? void 0 : job.subnet
    })], __read((0, volumes_1.createVolumeConfig)(job.volumes, "job")), false));
    return [
      // the exist_job_names lookup is emitted only once, for the first job
      jobIndex === 0 ? "exist_job_names=\"$(\n ".concat((0, common_1.gcloudRunCmd)(), " jobs list --filter='metadata.name ~ ").concat(context.env, ".*").concat(context.name, "' --format='value(name)' --limit=999 --project='").concat(project, "' --region='").concat(region, "'\n)\"") : null,
      "current_job_name=\"".concat(fullJobName, "\""),
      'if echo "$exist_job_names" | grep -Fx "$current_job_name" >/dev/null; then',
      " ".concat((0, common_1.gcloudRunCmd)(), " jobs update \"$current_job_name\" ").concat(commonDeployArgsString),
      "else",
      " ".concat((0, common_1.gcloudRunCmd)(), " jobs create \"$current_job_name\" ").concat(commonDeployArgsString),
      "fi"
    ].filter(utils_1.notNil).join("\n");
  });
};
exports.getJobCreateScripts = getJobCreateScripts;
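// For orientation only (hypothetical job name; the real deploy args are assembled above),
// every returned element is a bash snippet roughly of the form:
//   current_job_name="staging-app-migrate"
//   if echo "$exist_job_names" | grep -Fx "$current_job_name" >/dev/null; then
//     <gcloudRunCmd> jobs update "$current_job_name" <deploy args>
//   else
//     <gcloudRunCmd> jobs create "$current_job_name" <deploy args>
//   fi
// with the exist_job_names="$( ... jobs list ... )" lookup prepended to the first element only.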
// Reads the Cloud Run deploy config and returns every enabled job together with its
// resolved full job name.
var getCloudRunJobsWithNames = function (context) {
  var _a;
  var deployConfig = (0, common_1.getCloudRunDeployConfig)(context);
  var jobsWithNames = Object.entries((_a = deployConfig.jobs) !== null && _a !== void 0 ? _a : {})
    // filter out disabled jobs
    .filter(function (entry) {
      return Boolean(entry[1]);
    }).map(function (_a) {
      var _b = __read(_a, 2),
        jobName = _b[0],
        job = _b[1];
      return {
        fullJobName: (0, jobName_1.getFullJobName)(context, jobName),
        job: job,
        jobName: jobName
      };
    });
  return jobsWithNames;
};
exports.getCloudRunJobsWithNames = getCloudRunJobsWithNames;
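// Minimal usage sketch, not taken from the package docs: it assumes a valid deploy
// "context" object is supplied by the surrounding pipeline code, and the output file
// names are made up for illustration.
// var fs = require("fs");
// fs.writeFileSync("create-jobs.sh", (0, exports.getJobCreateScripts)(context).join("\n\n"));
// fs.writeFileSync("delete-jobs.sh", (0, exports.getDeleteJobsScripts)(context).join("\n"));
// Both outputs are plain bash and can be run wherever gcloud is authenticated against the target project.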