// @argos-ci/core — Node.js SDK for visual testing with Argos.
// Bundled output: 1,510 lines (1,475 loc), 42.4 kB, JavaScript.
// src/config.ts
import convict from "convict";
// src/ci-environment/git.ts
import { execSync } from "child_process";
// src/debug.ts
import createDebug from "debug";
// Debug namespace key shared by every debug call in this SDK.
var KEY = "@argos-ci/core";
var debug = createDebug(KEY);
// Captured once at module load; changing DEBUG afterwards has no effect.
var isDebugEnabled = createDebug.enabled(KEY);
// Start a console timer, but only when debug logging is enabled for this key.
var debugTime = (arg) => {
  if (isDebugEnabled) {
    console.time(arg);
  }
};
// Stop a timer previously started by `debugTime` and print the elapsed time.
var debugTimeEnd = (arg) => {
  if (isDebugEnabled) {
    console.timeEnd(arg);
  }
};
// src/ci-environment/git.ts
// Detects whether the current working directory is inside a git work tree.
// Returns false (instead of throwing) when git is absent or we are outside a repo.
function checkIsGitRepository() {
  try {
    const output = execSync("git rev-parse --is-inside-work-tree");
    return output.toString().trim() === "true";
  } catch {
    return false;
  }
}
// Resolves the SHA of the current HEAD commit, or null when unavailable.
function head() {
  try {
    const sha = execSync("git rev-parse HEAD").toString();
    return sha.trim();
  } catch {
    return null;
  }
}
// Returns the current branch name, or null when detached or on any git error.
function branch() {
  try {
    const ref = execSync("git rev-parse --abbrev-ref HEAD").toString().trim();
    // A detached HEAD reports the literal string "HEAD" — treat it as "no branch".
    return ref === "HEAD" ? null : ref;
  } catch {
    return null;
  }
}
// Reads the `origin` remote URL from git config, or null when not configured.
function getRepositoryURL() {
  try {
    return execSync("git config --get remote.origin.url").toString().trim();
  } catch {
    return null;
  }
}
// Runs `git merge-base <head> <base>`.
// Returns the merge-base SHA, or null when the two commits share no common
// ancestor (git exits 1 with empty stderr); any other failure is rethrown.
function gitMergeBase(input) {
  try {
    const out = execSync(`git merge-base ${input.head} ${input.base}`);
    return out.toString().trim();
  } catch (error) {
    const isNoCommonAncestor = checkIsExecError(error) && error.status === 1 && error.stderr.toString() === "";
    if (isNoCommonAncestor) {
      return null;
    }
    throw error;
  }
}
// Fetches `ref` from origin into the local `target` ref at the given depth.
// --force + --update-head-ok allow overwriting refs, including checked-out ones.
function gitFetch(input) {
  const { depth, ref, target } = input;
  execSync(
    `git fetch --force --update-head-ok --depth ${depth} origin ${ref}:${target}`
  );
}
// Guard: true when `error` looks like a child_process exec error,
// i.e. an Error carrying a numeric exit `status` and a Buffer `stderr`.
function checkIsExecError(error) {
  if (!(error instanceof Error)) {
    return false;
  }
  const hasStatus = "status" in error && typeof error.status === "number";
  const hasStderr = "stderr" in error && Buffer.isBuffer(error.stderr);
  return hasStatus && hasStderr;
}
// Finds the merge-base commit between `input.head` and `input.base` by
// fetching both refs from origin with progressively deeper history
// (200, 400, … while < 1000) until `git merge-base` succeeds.
// Returns the SHA, or null when no common ancestor is found within the limit.
function getMergeBaseCommitSha(input) {
  let depth = 200;
  // Fetch into argos/-prefixed local refs to avoid clobbering the user's refs.
  const argosBaseRef = `argos/${input.base}`;
  const argosHeadRef = `argos/${input.head}`;
  while (depth < 1e3) {
    gitFetch({ ref: input.head, depth, target: argosHeadRef });
    gitFetch({ ref: input.base, depth, target: argosBaseRef });
    const mergeBase = gitMergeBase({
      base: argosBaseRef,
      head: argosHeadRef
    });
    if (mergeBase) {
      return mergeBase;
    }
    depth += 200;
  }
  // Debug-only diagnostics: dump what was actually fetched on both sides.
  if (isDebugEnabled) {
    const headShas = listShas(argosHeadRef);
    const baseShas = listShas(argosBaseRef);
    debug(
      `No merge base found for ${input.head} and ${input.base} with depth ${depth}`
    );
    debug(
      `Found ${headShas.length} commits in ${input.head}: ${headShas.join(", ")}`
    );
    debug(
      `Found ${baseShas.length} commits in ${input.base}: ${baseShas.join(", ")}`
    );
  }
  return null;
}
// Lists commit SHAs reachable from `path` (a ref), newest first,
// optionally capped at `maxCount` entries.
function listShas(path, maxCount) {
  const limitFlag = maxCount ? `--max-count=${maxCount}` : "";
  const output = execSync(`git log --format="%H" ${limitFlag} ${path}`.trim());
  return output.toString().trim().split("\n");
}
// Returns up to 200 ancestor SHAs of `input.sha` (the commit itself included),
// deepening the local history first so the ancestors are available.
function listParentCommits(input) {
  const limit = 200;
  try {
    execSync(`git fetch --depth=${limit} origin ${input.sha}`);
  } catch (error) {
    // "not our ref": origin does not know this SHA — nothing to list.
    if (error instanceof Error && error.message.includes("not our ref")) {
      return [];
    }
    // Any other fetch failure is deliberately ignored: we still try to read
    // whatever history is already available locally.
  }
  return listShas(input.sha, limit);
}
// src/ci-environment/services/bitrise.ts
// Bitrise: pull request number from BITRISE_PULL_REQUEST, or null outside PRs.
function getPrNumber(context) {
  const value = context.env.BITRISE_PULL_REQUEST;
  return value ? Number(value) : null;
}
// Bitrise: "owner/slug" repository name when both env pieces are present.
function getRepository(context) {
  const { BITRISEIO_GIT_REPOSITORY_OWNER: owner, BITRISEIO_GIT_REPOSITORY_SLUG: slug } = context.env;
  return owner && slug ? `${owner}/${slug}` : null;
}
// Bitrise CI environment descriptor.
var service = {
  name: "Bitrise",
  key: "bitrise",
  // BITRISE_IO is set on every Bitrise build.
  detect: ({ env }) => Boolean(env.BITRISE_IO),
  config: (context) => {
    const { env } = context;
    const repository = getRepository(context);
    return {
      commit: env.BITRISE_GIT_COMMIT || null,
      branch: env.BITRISE_GIT_BRANCH || null,
      repository,
      // No fork information is read here, so original == current repository.
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Pipeline id is used to group parallel uploads of the same build.
      nonce: env.BITRISEIO_PIPELINE_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var bitrise_default = service;
// src/util/url.ts
// Extracts "owner/repo" from a git remote URL.
// Supports scp-like SSH URLs (git@host:owner/repo.git) and http(s)/git URLs;
// returns null for anything else.
function getRepositoryNameFromURL(url) {
  const patterns = [
    /^git@[^:]+:([^/]+)\/(.+?)(?:\.git)?$/,
    /^(?:https?|git):\/\/[^/]+\/([^/]+)\/(.+?)(?:\.git)?$/
  ];
  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (match && match[1] && match[2]) {
      return `${match[1]}/${match[2]}`;
    }
  }
  return null;
}
// src/ci-environment/services/buildkite.ts
// Buildkite: derive "owner/repo" from the BUILDKITE_REPO remote URL.
function getRepository2(context) {
  const repoUrl = context.env.BUILDKITE_REPO;
  return repoUrl ? getRepositoryNameFromURL(repoUrl) : null;
}
// Buildkite CI environment descriptor.
var service2 = {
  name: "Buildkite",
  key: "buildkite",
  detect: ({ env }) => Boolean(env.BUILDKITE),
  config: (context) => {
    const { env } = context;
    const repository = getRepository2(context);
    return {
      // Buildkite doesn't work well so we fallback to git to ensure we have commit and branch
      commit: env.BUILDKITE_COMMIT || head() || null,
      branch: env.BUILDKITE_BRANCH || branch() || null,
      repository,
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      // Buildkite sets BUILDKITE_PULL_REQUEST to the literal string "false"
      // on non-PR builds; Number("false") would be NaN, so treat it as null.
      prNumber: env.BUILDKITE_PULL_REQUEST && env.BUILDKITE_PULL_REQUEST !== "false" ? Number(env.BUILDKITE_PULL_REQUEST) : null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.BUILDKITE_BUILD_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var buildkite_default = service2;
// src/ci-environment/services/heroku.ts
// Heroku CI environment descriptor.
var service3 = {
  name: "Heroku",
  key: "heroku",
  detect: ({ env }) => Boolean(env.HEROKU_TEST_RUN_ID),
  config: ({ env }) => ({
    commit: env.HEROKU_TEST_RUN_COMMIT_VERSION || null,
    branch: env.HEROKU_TEST_RUN_BRANCH || null,
    // NOTE(review): `owner` is not produced by any other service config —
    // presumably a leftover field; harmless but worth confirming.
    owner: null,
    repository: null,
    originalRepository: null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: null,
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: env.HEROKU_TEST_RUN_ID || null,
    mergeQueue: false
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var heroku_default = service3;
// src/ci-environment/services/github-actions.ts
import { existsSync, readFileSync } from "fs";
// src/ci-environment/github.ts
// "owner/repo" from GITHUB_REPOSITORY, or null when unset/empty.
function getGitHubRepository(ctx) {
  const repo = ctx.env.GITHUB_REPOSITORY;
  return repo || null;
}
// Like getGitHubRepository but throws when the variable is missing.
function assertGitHubRepository(ctx) {
  const repository = getGitHubRepository(ctx);
  if (repository) {
    return repository;
  }
  throw new Error("GITHUB_REPOSITORY is missing");
}
// Returns GITHUB_TOKEN when present. When absent, logs a setup hint
// (unless DISABLE_GITHUB_TOKEN_WARNING is set) and returns null.
function getGitHubToken({ env }) {
  if (!env.GITHUB_TOKEN) {
    if (!env.DISABLE_GITHUB_TOKEN_WARNING) {
      // The template below is user-facing output; its content and layout
      // are intentional and must not be reformatted.
      console.log(
        `
Argos couldn\u2019t find a relevant pull request in the current environment.
To resolve this, Argos requires a GITHUB_TOKEN to fetch the pull request associated with the head SHA. Please ensure the following environment variable is added:
GITHUB_TOKEN: \${{ secrets.GITHUB_TOKEN }}
For more details, check out the documentation: Read more at https://argos-ci.com/docs/run-on-preview-deployment
If you want to disable this warning, you can set the following environment variable:
DISABLE_GITHUB_TOKEN_WARNING: true
`.trim()
      );
    }
    return null;
  }
  return env.GITHUB_TOKEN;
}
// Performs an authenticated GitHub REST call with a 10s timeout.
// Returns null (without fetching) when no token is available.
async function fetchGitHubAPI(ctx, url) {
  const token = getGitHubToken(ctx);
  if (!token) {
    return null;
  }
  const headers = {
    Accept: "application/vnd.github+json",
    Authorization: `Bearer ${token}`,
    "X-GitHub-Api-Version": "2022-11-28"
  };
  return fetch(url, { headers, signal: AbortSignal.timeout(1e4) });
}
var GITHUB_API_BASE_URL = "https://api.github.com";
// Looks up an open pull request whose head commit is `sha`.
// Only the 30 most recently updated open PRs are scanned; returns the matching
// PR object, or null when there is no token, no results, or no match.
async function getPullRequestFromHeadSha(ctx, sha) {
  debug(`Fetching pull request details from head sha: ${sha}`);
  const githubRepository = assertGitHubRepository(ctx);
  const url = new URL(`/repos/${githubRepository}/pulls`, GITHUB_API_BASE_URL);
  url.search = new URLSearchParams({
    state: "open",
    sort: "updated",
    per_page: "30",
    page: "1"
  }).toString();
  const response = await fetchGitHubAPI(ctx, url);
  if (!response) {
    return null;
  }
  if (!response.ok) {
    throw new Error(
      `Non-OK response (status: ${response.status}) while fetching pull request details from head sha (${sha})`
    );
  }
  const result = await response.json();
  if (result.length === 0) {
    debug("No results, no pull request found");
    return null;
  }
  const matchingPr = result.find((pr) => pr.head.sha === sha);
  if (matchingPr) {
    debug("Pull request found", matchingPr);
    return matchingPr;
  }
  debug("No matching pull request found");
  return null;
}
// Fetches a single pull request by number.
// Returns null when no token is available or the PR does not exist (404);
// throws on any other non-OK response.
async function getPullRequestFromPrNumber(ctx, prNumber) {
  debug(`Fetching pull request #${prNumber}`);
  const githubRepository = assertGitHubRepository(ctx);
  const response = await fetchGitHubAPI(
    ctx,
    new URL(
      `/repos/${githubRepository}/pulls/${prNumber}`,
      GITHUB_API_BASE_URL
    )
  );
  if (!response) {
    return null;
  }
  if (response.status === 404) {
    debug(
      "No pull request found, pr detection from branch was probably a mistake"
    );
    return null;
  }
  if (!response.ok) {
    throw new Error(
      `Non-OK response (status: ${response.status}) while fetching pull request #${prNumber}`
    );
  }
  const result = await response.json();
  return result;
}
// Extracts the PR number encoded in a merge-queue branch name,
// e.g. "gh-readonly-queue/main/pr-123-<sha>" -> 123. Returns null when absent.
function getPRNumberFromMergeGroupBranch(branch2) {
  const match = /queue\/[^/]*\/pr-(\d+)-/.exec(branch2);
  return match ? Number(match[1]) : null;
}
// src/ci-environment/services/github-actions.ts
// Parses the GitHub Actions event payload file pointed to by
// GITHUB_EVENT_PATH, or null when the variable or the file is missing.
function readEventPayload({ env }) {
  const eventPath = env.GITHUB_EVENT_PATH;
  if (!eventPath || !existsSync(eventPath)) {
    return null;
  }
  return JSON.parse(readFileSync(eventPath, "utf-8"));
}
// Narrows `payload` to a Vercel "deployment success" repository_dispatch
// event; returns null for anything else.
function getVercelDeploymentPayload(payload) {
  const isVercelSuccess = process.env.GITHUB_EVENT_NAME === "repository_dispatch" && payload && "action" in payload && payload.action === "vercel.deployment.success";
  return isVercelSuccess ? payload : null;
}
// Narrows `payload` to a merge_group "checks_requested" event;
// returns null for anything else.
function getMergeGroupPayload(payload) {
  const isMergeGroupCheck = Boolean(payload) && process.env.GITHUB_EVENT_NAME === "merge_group" && "action" in payload && payload.action === "checks_requested";
  return isMergeGroupCheck ? payload : null;
}
// Branch name from the Actions context: the PR head ref when present,
// otherwise a "refs/heads/…" ref parsed out of GITHUB_REF.
function getBranchFromContext(context) {
  const { GITHUB_HEAD_REF, GITHUB_REF } = context.env;
  if (GITHUB_HEAD_REF) {
    return GITHUB_HEAD_REF;
  }
  if (!GITHUB_REF) {
    return null;
  }
  const matches = /refs\/heads\/(.*)/.exec(GITHUB_REF);
  return matches?.[1] ?? null;
}
// Branch name from an event payload: workflow_run events carry head_branch,
// deployment events carry the environment name.
function getBranchFromPayload(payload) {
  const workflowRun = "workflow_run" in payload ? payload.workflow_run : null;
  if (workflowRun) {
    return workflowRun.head_branch;
  }
  const deployment = "deployment" in payload ? payload.deployment : null;
  return deployment ? deployment.environment : null;
}
// Resolves the branch under test, in priority order: merge-queue PR head ref,
// Vercel payload git ref, event payload, Actions context, then the pull
// request head ref; null when none applies.
function getBranch(args) {
  const { payload, mergeGroupPayload, vercelPayload, pullRequest, context } = args;
  if (mergeGroupPayload && pullRequest?.head.ref) {
    return pullRequest.head.ref;
  }
  if (vercelPayload) {
    return vercelPayload.client_payload.git.ref;
  }
  const payloadBranch = payload ? getBranchFromPayload(payload) : null;
  if (payloadBranch) {
    return payloadBranch;
  }
  const contextBranch = getBranchFromContext(context);
  if (contextBranch) {
    return contextBranch;
  }
  return pullRequest ? pullRequest.head.ref : null;
}
// Repository that contains the tested commit. For pull_request events this is
// the head repository (which differs from GITHUB_REPOSITORY on forks).
function getRepository3(context, payload) {
  const pr = payload && "pull_request" in payload ? payload.pull_request : null;
  const headRepoName = pr?.head?.repo?.full_name;
  if (headRepoName) {
    return headRepoName;
  }
  return getGitHubRepository(context);
}
// SHA of the commit under test: Vercel payload sha when present,
// otherwise GITHUB_SHA (which is required).
function getSha(context, vercelPayload) {
  if (vercelPayload) {
    return vercelPayload.client_payload.git.sha;
  }
  const sha = context.env.GITHUB_SHA;
  if (!sha) {
    throw new Error(`GITHUB_SHA is missing`);
  }
  return sha;
}
// Extracts a pull request object from supported event payloads:
// pull_request events directly, workflow_run / check_run events via the first
// entry of their associated pull_requests list. Returns null when none.
function getPullRequestFromPayload(payload) {
  // The original condition tested `payload.pull_request` twice;
  // a single truthiness check is sufficient.
  if ("pull_request" in payload && payload.pull_request) {
    return payload.pull_request;
  }
  if ("workflow_run" in payload && payload.workflow_run && payload.workflow_run.pull_requests[0]) {
    return payload.workflow_run.pull_requests[0];
  }
  if ("check_run" in payload && payload.check_run && "pull_requests" in payload.check_run && payload.check_run.pull_requests[0]) {
    return payload.check_run.pull_requests[0];
  }
  return null;
}
// Resolves the pull request for the current run:
// - Vercel deployment or no payload: look it up by head SHA via the API.
// - Merge queue: recover the PR number from the queue branch name, then fetch it.
// - Otherwise: read it straight from the event payload.
async function getPullRequest(args) {
  const { payload, vercelPayload, mergeGroupPayload, context, sha } = args;
  if (vercelPayload || !payload) {
    return getPullRequestFromHeadSha(context, sha);
  }
  if (mergeGroupPayload) {
    const prNumber = getPRNumberFromMergeGroupBranch(
      mergeGroupPayload.merge_group.head_ref
    );
    if (!prNumber) {
      debug(
        `No PR found from merge group head ref: ${mergeGroupPayload.merge_group.head_ref}`
      );
      return null;
    }
    debug(
      `PR #${prNumber} found from merge group head ref (${mergeGroupPayload.merge_group.head_ref})`
    );
    return getPullRequestFromPrNumber(context, prNumber);
  }
  return getPullRequestFromPayload(payload);
}
// GitHub Actions CI environment descriptor.
var service4 = {
  name: "GitHub Actions",
  key: "github-actions",
  detect: (context) => Boolean(context.env.GITHUB_ACTIONS),
  config: async (context) => {
    const { env } = context;
    // Raw event payload and its two specialized projections.
    const payload = readEventPayload(context);
    const vercelPayload = getVercelDeploymentPayload(payload);
    const mergeGroupPayload = getMergeGroupPayload(payload);
    const sha = getSha(context, vercelPayload);
    const pullRequest = await getPullRequest({
      payload,
      vercelPayload,
      mergeGroupPayload,
      sha,
      context
    });
    const branch2 = getBranch({
      payload,
      vercelPayload,
      mergeGroupPayload,
      context,
      pullRequest
    });
    return {
      commit: sha,
      repository: getRepository3(context, payload),
      originalRepository: getGitHubRepository(context),
      jobId: env.GITHUB_JOB || null,
      runId: env.GITHUB_RUN_ID || null,
      runAttempt: env.GITHUB_RUN_ATTEMPT ? Number(env.GITHUB_RUN_ATTEMPT) : null,
      // NOTE(review): if either variable is unset this yields e.g.
      // "undefined-undefined" rather than null — confirm that is intended.
      nonce: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`,
      branch: branch2,
      prNumber: pullRequest?.number || null,
      prHeadCommit: pullRequest?.head.sha ?? null,
      prBaseBranch: pullRequest?.base.ref ?? null,
      mergeQueue: Boolean(mergeGroupPayload)
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var github_actions_default = service4;
// src/ci-environment/services/circleci.ts
// CircleCI: PR number parsed out of the CIRCLE_PULL_REQUEST URL (".../pull/<n>").
function getPrNumber2(context) {
  const prUrl = context.env.CIRCLE_PULL_REQUEST || "";
  const match = /pull\/(\d+)/.exec(prUrl);
  return match ? Number(match[1]) : null;
}
// CircleCI: forked-PR repository when available, else the project repository.
function getRepository4(context) {
  const { CIRCLE_PR_REPONAME, CIRCLE_PR_USERNAME } = context.env;
  if (CIRCLE_PR_REPONAME && CIRCLE_PR_USERNAME) {
    return `${CIRCLE_PR_USERNAME}/${CIRCLE_PR_REPONAME}`;
  }
  return getOriginalRepository(context);
}
// CircleCI: "user/repo" project name when both env pieces are present.
function getOriginalRepository(context) {
  const { CIRCLE_PROJECT_USERNAME: user, CIRCLE_PROJECT_REPONAME: repo } = context.env;
  return user && repo ? `${user}/${repo}` : null;
}
// CircleCI environment descriptor.
var service5 = {
  name: "CircleCI",
  key: "circleci",
  detect: ({ env }) => Boolean(env.CIRCLECI),
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CIRCLE_SHA1 || null,
      branch: env.CIRCLE_BRANCH || null,
      // `repository` prefers the forked-PR repo, `originalRepository` is the project.
      repository: getRepository4(context),
      originalRepository: getOriginalRepository(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber2({ env }),
      prHeadCommit: null,
      prBaseBranch: null,
      // Workflow id groups parallel jobs; build number is the fallback.
      nonce: env.CIRCLE_WORKFLOW_ID || env.CIRCLE_BUILD_NUM || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var circleci_default = service5;
// src/ci-environment/services/travis.ts
// Travis: for forked PRs use the PR slug, otherwise the base repository slug.
function getRepository5(context) {
  const prSlug = context.env.TRAVIS_PULL_REQUEST_SLUG;
  return prSlug || getOriginalRepository2(context);
}
// Travis: the base repository slug ("owner/repo"), or null.
function getOriginalRepository2(context) {
  return context.env.TRAVIS_REPO_SLUG || null;
}
// Travis: pull request number, or null when not a PR build.
// Travis sets TRAVIS_PULL_REQUEST to the literal string "false" on push
// builds; the previous code turned that into NaN via Number("false").
function getPrNumber3(context) {
  const { env } = context;
  const value = env.TRAVIS_PULL_REQUEST;
  if (!value || value === "false") {
    return null;
  }
  const prNumber = Number(value);
  // Guard against any other non-numeric value.
  return Number.isNaN(prNumber) ? null : prNumber;
}
// Travis CI environment descriptor.
var service6 = {
  name: "Travis CI",
  key: "travis",
  detect: ({ env }) => Boolean(env.TRAVIS),
  config: (ctx) => {
    const { env } = ctx;
    return {
      commit: env.TRAVIS_COMMIT || null,
      branch: env.TRAVIS_BRANCH || null,
      // `repository` prefers the forked-PR slug, `originalRepository` the base slug.
      repository: getRepository5(ctx),
      originalRepository: getOriginalRepository2(ctx),
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: getPrNumber3(ctx),
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.TRAVIS_BUILD_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var travis_default = service6;
// src/ci-environment/services/gitlab.ts
// GitLab: prefer the merge request's project path, else the project path.
function getRepository6(context) {
  const mrPath = context.env.CI_MERGE_REQUEST_PROJECT_PATH;
  return mrPath || getOriginalRepository3(context);
}
// GitLab: the project path ("group/project"), or null.
function getOriginalRepository3(context) {
  return context.env.CI_PROJECT_PATH || null;
}
// GitLab CI environment descriptor.
var service7 = {
  name: "GitLab",
  key: "gitlab",
  // GitLab documents GITLAB_CI as the string "true"; strict comparison here.
  detect: ({ env }) => env.GITLAB_CI === "true",
  config: (context) => {
    const { env } = context;
    return {
      commit: env.CI_COMMIT_SHA || null,
      branch: env.CI_COMMIT_REF_NAME || null,
      repository: getRepository6(context),
      originalRepository: getOriginalRepository3(context),
      jobId: null,
      runId: null,
      runAttempt: null,
      // Merge request number is not detected for GitLab.
      prNumber: null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.CI_PIPELINE_ID || null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var gitlab_default = service7;
// src/ci-environment/services/git.ts
// Derives "owner/repo" from the local git origin remote, if any.
function getRepository7() {
  const url = getRepositoryURL();
  return url ? getRepositoryNameFromURL(url) : null;
}
// Plain-git fallback descriptor, used when no hosted CI service is detected.
var service8 = {
  name: "Git",
  key: "git",
  detect: () => checkIsGitRepository(),
  config: () => {
    const repository = getRepository7();
    return {
      commit: head() || null,
      branch: branch() || null,
      repository,
      originalRepository: repository,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: null,
      mergeQueue: false
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var git_default = service8;
// src/ci-environment/index.ts
// Ordered list of CI detectors; the first match wins.
// Plain `git` is last as the fallback for any local repository.
var services = [
  heroku_default,
  github_actions_default,
  circleci_default,
  travis_default,
  buildkite_default,
  gitlab_default,
  bitrise_default,
  git_default
];
// Builds the detection context from the current process environment.
function createContext() {
  const env = process.env;
  return { env };
}
// First service whose detect() matches the context, or undefined.
function getCiService(context) {
  for (const candidate of services) {
    if (candidate.detect(context)) {
      return candidate;
    }
  }
  return void 0;
}
// Entry point: merge-base lookup delegated to the detected CI service,
// or null when no service matches.
function getMergeBaseCommitSha2(input) {
  const context = createContext();
  const detected = getCiService(context);
  return detected ? detected.getMergeBaseCommitSha(input, context) : null;
}
// Entry point: parent-commit listing delegated to the detected CI service,
// or null when no service matches.
function listParentCommits2(input) {
  const context = createContext();
  const detected = getCiService(context);
  return detected ? detected.listParentCommits(input, context) : null;
}
// Detects the current CI environment and resolves its configuration.
// Returns { name, key, ...variables } or null when nothing matches.
async function getCiEnvironment() {
  const context = createContext();
  debug("Detecting CI environment", context);
  const service9 = getCiService(context);
  if (service9) {
    debug("Internal service matched", service9.name);
    // config may be sync or async (GitHub Actions) — await handles both.
    const variables = await service9.config(context);
    const ciEnvironment = {
      name: service9.name,
      key: service9.key,
      ...variables
    };
    debug("CI environment", ciEnvironment);
    return ciEnvironment;
  }
  return null;
}
// src/config.ts
// convict format: the API base URL must look like an http(s) URL.
var mustBeApiBaseUrl = (value) => {
  const URL_REGEX = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/;
  if (URL_REGEX.test(value)) {
    return;
  }
  throw new Error("Invalid Argos API base URL");
};
// convict format: commits must be full 40-char lowercase hex SHA1s.
// A dedicated message is raised for 7-char short SHAs.
var mustBeCommit = (value) => {
  if (/^[0-9a-f]{40}$/.test(value)) {
    return;
  }
  if (/^[0-9a-f]{7}$/.test(value)) {
    throw new Error("Short SHA1 is not allowed");
  }
  throw new Error("Invalid commit");
};
// convict format: when set, the token must be exactly 40 characters.
// Null/empty values pass (the token may come from tokenless auth instead).
var mustBeArgosToken = (value) => {
  if (!value) {
    return;
  }
  if (value.length !== 40) {
    throw new Error("Invalid Argos repository token (must be 40 characters)");
  }
};
// Returns a convict validator enforcing "integer >= min".
var minInteger = (min) => {
  return (value) => {
    if (Number.isInteger(value)) {
      if (value >= min) {
        return;
      }
      throw new Error(`must be at least ${min}`);
    }
    throw new Error("must be an integer");
  };
};
// convict coercion: "" -> null (unset), otherwise numeric conversion.
// Non-integer input passes through as-is (possibly NaN) so the format
// validator (minInteger) reports the error with a proper message.
// The previous version had an if/else in which both branches returned the
// same value — the dead conditional has been removed.
var toInt = (value) => {
  if (value === "") {
    return null;
  }
  return Number(value);
};
// convict coercion for float-percent values.
var toFloat = (value) => parseFloat(value);
// Register custom convict formats used by the schema below.
// parallel-total accepts -1 (meaning "total unknown, finalize later")
// while parallel-index must start at 1.
convict.addFormat({
  name: "parallel-total",
  validate: minInteger(-1),
  coerce: toInt
});
convict.addFormat({
  name: "parallel-index",
  validate: minInteger(1),
  coerce: toInt
});
convict.addFormat({
  name: "float-percent",
  // Accepts any float in [0, 1]; 0 is explicitly allowed despite being falsy.
  validate: (val) => {
    if (val !== 0 && (!val || val > 1 || val < 0)) {
      throw new Error("Must be a float between 0 and 1, inclusive.");
    }
  },
  coerce: toFloat
});
// convict schema for the Argos configuration. Entries with an `env` key can
// be overridden by the matching ARGOS_* variable (resolved in getDefaultConfig);
// entries without one (jobId, runId, repository, …) are only filled from the
// detected CI environment.
var schema = {
  apiBaseUrl: {
    env: "ARGOS_API_BASE_URL",
    default: "https://api.argos-ci.com/v2/",
    format: mustBeApiBaseUrl
  },
  commit: {
    env: "ARGOS_COMMIT",
    default: null,
    format: mustBeCommit
  },
  branch: {
    env: "ARGOS_BRANCH",
    default: null,
    format: String
  },
  token: {
    env: "ARGOS_TOKEN",
    default: null,
    format: mustBeArgosToken
  },
  buildName: {
    env: "ARGOS_BUILD_NAME",
    default: null,
    format: String,
    nullable: true
  },
  mode: {
    env: "ARGOS_MODE",
    // Enumerated format: only these two values are accepted.
    format: ["ci", "monitoring"],
    default: null,
    nullable: true
  },
  prNumber: {
    env: "ARGOS_PR_NUMBER",
    format: Number,
    default: null,
    nullable: true
  },
  prHeadCommit: {
    env: "ARGOS_PR_HEAD_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  prBaseBranch: {
    env: "ARGOS_PR_BASE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  // Parallel-upload settings: see the custom formats registered above.
  parallel: {
    env: "ARGOS_PARALLEL",
    default: false,
    format: Boolean
  },
  parallelNonce: {
    env: "ARGOS_PARALLEL_NONCE",
    format: String,
    default: null,
    nullable: true
  },
  parallelIndex: {
    env: "ARGOS_PARALLEL_INDEX",
    format: "parallel-index",
    default: null,
    nullable: true
  },
  parallelTotal: {
    env: "ARGOS_PARALLEL_TOTAL",
    format: "parallel-total",
    default: null,
    nullable: true
  },
  referenceBranch: {
    env: "ARGOS_REFERENCE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  referenceCommit: {
    env: "ARGOS_REFERENCE_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  jobId: {
    format: String,
    default: null,
    nullable: true
  },
  runId: {
    format: String,
    default: null,
    nullable: true
  },
  runAttempt: {
    // "nat" is convict's built-in natural-number format.
    format: "nat",
    default: null,
    nullable: true
  },
  repository: {
    format: String,
    default: null,
    nullable: true
  },
  originalRepository: {
    format: String,
    default: null,
    nullable: true
  },
  ciProvider: {
    format: String,
    default: null,
    nullable: true
  },
  threshold: {
    env: "ARGOS_THRESHOLD",
    format: "float-percent",
    default: null,
    nullable: true
  },
  previewBaseUrl: {
    env: "ARGOS_PREVIEW_BASE_URL",
    format: String,
    default: null,
    nullable: true
  },
  skipped: {
    env: "ARGOS_SKIPPED",
    format: Boolean,
    default: false
  },
  mergeQueue: {
    format: Boolean,
    default: false
  },
  subset: {
    env: "ARGOS_SUBSET",
    format: Boolean,
    default: false
  }
};
// Fresh convict config bound to the schema. CLI args and the process env are
// ignored here on purpose: env defaults are resolved manually elsewhere.
function createConfig() {
  const options = { args: [], env: {} };
  return convict(schema, options);
}
// Resolves each schema entry to process.env[entry.env] when set (and truthy),
// otherwise to the entry's declared default.
function getDefaultConfig() {
  const result = {};
  for (const [key, entry] of Object.entries(schema)) {
    const envName = "env" in entry ? entry.env : void 0;
    const envValue = envName ? process.env[envName] : void 0;
    result[key] = envValue ? envValue : entry.default;
  }
  return result;
}
// Builds the final configuration by merging, in priority order:
// explicit `options` > ARGOS_* environment defaults > detected CI environment.
// Throws when no branch/commit can be determined, then validates via convict.
async function readConfig(options = {}) {
  const config = createConfig();
  const ciEnv = await getCiEnvironment();
  const defaultConfig = getDefaultConfig();
  config.load({
    apiBaseUrl: options.apiBaseUrl || defaultConfig.apiBaseUrl,
    commit: options.commit || defaultConfig.commit || ciEnv?.commit || null,
    branch: options.branch || defaultConfig.branch || ciEnv?.branch || null,
    token: options.token || defaultConfig.token || null,
    buildName: options.buildName || defaultConfig.buildName || null,
    prNumber: options.prNumber || defaultConfig.prNumber || ciEnv?.prNumber || null,
    prHeadCommit: defaultConfig.prHeadCommit || ciEnv?.prHeadCommit || null,
    prBaseBranch: defaultConfig.prBaseBranch || ciEnv?.prBaseBranch || null,
    referenceBranch: options.referenceBranch || defaultConfig.referenceBranch || null,
    referenceCommit: options.referenceCommit || defaultConfig.referenceCommit || null,
    // CI-only values: never overridable via options.
    repository: ciEnv?.repository || null,
    originalRepository: ciEnv?.originalRepository || null,
    jobId: ciEnv?.jobId || null,
    runId: ciEnv?.runId || null,
    runAttempt: ciEnv?.runAttempt || null,
    // ?? (not ||) so explicit false/0 values are preserved.
    parallel: options.parallel ?? defaultConfig.parallel ?? false,
    parallelNonce: options.parallelNonce || defaultConfig.parallelNonce || ciEnv?.nonce || null,
    parallelTotal: options.parallelTotal ?? defaultConfig.parallelTotal ?? null,
    parallelIndex: options.parallelIndex ?? defaultConfig.parallelIndex ?? null,
    mode: options.mode || defaultConfig.mode || null,
    ciProvider: ciEnv?.key || null,
    previewBaseUrl: defaultConfig.previewBaseUrl || null,
    skipped: options.skipped ?? defaultConfig.skipped ?? false,
    subset: options.subset ?? defaultConfig.subset ?? false,
    mergeQueue: ciEnv?.mergeQueue ?? false
  });
  if (!config.get("branch") || !config.get("commit")) {
    throw new Error(
      "Argos requires a branch and a commit to be set. If you are running in a non-git environment consider setting ARGOS_BRANCH and ARGOS_COMMIT environment variables."
    );
  }
  config.validate();
  return config.get();
}
// Public options -> readConfig options: flattens the `parallel` object
// ({ nonce, total, index }) into the flat parallel* fields.
async function getConfigFromOptions({
  parallel,
  ...options
}) {
  return readConfig({
    ...options,
    // `parallel` may legitimately be false; only coerce when provided.
    parallel: parallel !== void 0 ? Boolean(parallel) : void 0,
    parallelNonce: parallel ? parallel.nonce : void 0,
    parallelTotal: parallel ? parallel.total : void 0,
    parallelIndex: parallel ? parallel.index : void 0
  });
}
// src/finalize.ts
import { createClient, throwAPIError } from "@argos-ci/api-client";
// src/auth.ts
// JSON-encode then base64-encode an object (used for tokenless auth payloads).
var base64Encode = (obj) => {
  const json = JSON.stringify(obj);
  return Buffer.from(json, "utf8").toString("base64");
};
// Resolves the authentication token for API calls.
// An explicit ARGOS_TOKEN always wins; otherwise GitHub Actions builds use
// "tokenless" auth: a base64-encoded JSON blob of run metadata prefixed with
// "tokenless-github-". Throws when neither is available.
function getAuthToken(args) {
  const {
    token,
    ciProvider,
    originalRepository: repository,
    jobId,
    runId,
    prNumber
  } = args;
  if (token) {
    return token;
  }
  switch (ciProvider) {
    case "github-actions": {
      if (!repository || !jobId || !runId) {
        throw new Error(
          `Automatic GitHub Actions variables detection failed. Please add the 'ARGOS_TOKEN'`
        );
      }
      const [owner, repo] = repository.split("/");
      return `tokenless-github-${base64Encode({
        owner,
        repository: repo,
        jobId,
        runId,
        // void 0 keeps the key out of the JSON entirely when there is no PR.
        prNumber: prNumber ?? void 0
      })}`;
    }
    default:
      throw new Error("Missing Argos repository token 'ARGOS_TOKEN'");
  }
}
// src/finalize.ts
// Finalizes a parallel build: tells the API that all shards identified by
// `parallel.nonce` have finished uploading. Returns the API response data.
async function finalize(params) {
  const config = await readConfig({
    parallelNonce: params.parallel?.nonce
  });
  const authToken = getAuthToken(config);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  // The nonce is mandatory: without it the API cannot group the shards.
  if (!config.parallelNonce) {
    throw new Error("parallel.nonce is required to finalize the build");
  }
  const finalizeBuildsResult = await apiClient.POST("/builds/finalize", {
    body: {
      parallelNonce: config.parallelNonce
    }
  });
  if (finalizeBuildsResult.error) {
    throwAPIError(finalizeBuildsResult.error);
  }
  return finalizeBuildsResult.data;
}
// src/upload.ts
import { createClient as createClient3, throwAPIError as throwAPIError3 } from "@argos-ci/api-client";
// src/discovery.ts
import { extname, resolve } from "path";
import glob from "fast-glob";
// Expands glob `patterns` under `root` (cwd by default) into snapshot
// descriptors: `name` stays relative to root, `path` is absolute.
async function discoverSnapshots(patterns, { root = process.cwd(), ignore } = {}) {
  debug(
    `Discovering snapshots with patterns: ${Array.isArray(patterns) ? patterns.join(", ") : patterns} in ${root}`
  );
  const matches = await glob(patterns, { onlyFiles: true, ignore, cwd: root });
  return matches.map((match) => {
    debug(`Found screenshot: ${match}`);
    const path = resolve(root, match);
    return {
      name: match,
      path
    };
  });
}
// True when the file extension (case-insensitive) is .png, .jpg or .jpeg.
function checkIsValidImageFile(filename) {
  const extension = extname(filename).toLowerCase();
  return [".png", ".jpg", ".jpeg"].includes(extension);
}
// src/optimize.ts
import { promisify } from "util";
import { basename } from "path";
import sharp from "sharp";
import tmp from "tmp";
// Promisified tmp.file — resolves with a temporary file path.
var tmpFile = promisify(tmp.file);
// Hard cap on pixel count before screenshots get downscaled (8e7 = 80 MP).
var MAX_PIXELS = 8e7;
// Target width used when resizing portrait or unknown-size images.
var DEFAULT_MAX_WIDTH = 2048;
// Prepares a screenshot for upload: converts it to PNG and downscales any
// image whose pixel count exceeds MAX_PIXELS. Non-image files are returned
// untouched. Resolves with the path of the optimized temporary file
// (or the original path); wraps any failure with the file path for context.
async function optimizeScreenshot(filepath) {
  if (!checkIsValidImageFile(filepath)) {
    return filepath;
  }
  try {
    const [resultFilePath, metadata] = await Promise.all([
      tmpFile(),
      sharp(filepath).metadata()
    ]);
    const { width, height } = metadata;
    // Bounding box to resize into, or null when no resize is needed.
    const maxDimensions = (() => {
      if (!width || !height) {
        // Unknown dimensions: fall back to a conservative bounding box.
        return {
          width: DEFAULT_MAX_WIDTH,
          height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
        };
      }
      const nbPixels = width * height;
      if (nbPixels <= MAX_PIXELS) {
        return null;
      }
      if (width < height) {
        // Portrait: clamp width; the height bound absorbs the rest.
        return {
          width: DEFAULT_MAX_WIDTH,
          height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
        };
      }
      // Landscape: scale both sides uniformly to land on MAX_PIXELS.
      const scaleFactor = Math.sqrt(MAX_PIXELS / nbPixels);
      return {
        width: Math.floor(width * scaleFactor),
        height: Math.floor(height * scaleFactor)
      };
    })();
    let operation = sharp(filepath);
    if (maxDimensions) {
      operation = operation.resize(maxDimensions.width, maxDimensions.height, {
        fit: "inside",
        withoutEnlargement: true
      });
    }
    await operation.png({ force: true }).toFile(resultFilePath);
    if (width && height && maxDimensions) {
      // Recompute the dimensions sharp should have produced for the warning.
      // NOTE(review): with `withoutEnlargement` the actual output may differ
      // from this arithmetic in edge cases — confirm against sharp's behavior.
      const { width: maxWidth, height: maxHeight } = maxDimensions;
      const widthRatio = maxWidth / width;
      const heightRatio = maxHeight / height;
      const scaleFactor = Math.min(widthRatio, heightRatio);
      const newWidth = Math.floor(width * scaleFactor);
      const newHeight = Math.floor(height * scaleFactor);
      console.warn(
        `Image ${basename(filepath)} resized from ${width}x${height} to ${newWidth}x${newHeight}.`
      );
    }
    return resultFilePath;
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown Error";
    throw new Error(`Error while processing image (${filepath}): ${message}`, {
      cause: error
    });
  }
}
// src/hashing.ts
import { createReadStream } from "fs";
import { createHash } from "crypto";
// Streams `filepath` through SHA-256 and resolves with the hex digest.
var hashFile = async (filepath) => {
  const hash = createHash("sha256");
  const stream = createReadStream(filepath);
  await new Promise((done, fail) => {
    stream.on("error", fail);
    hash.on("error", fail);
    // 'finish' fires when the writable side of the hash transform completes.
    hash.on("finish", done);
    stream.pipe(hash);
  });
  return hash.digest("hex");
};
// src/s3.ts
import { readFile } from "fs/promises";
// PUTs a local file to a presigned URL with a 30s timeout.
// Throws on any non-2xx response; the whole file is buffered in memory.
async function uploadFile(input) {
  const file = await readFile(input.path);
  const response = await fetch(input.url, {
    method: "PUT",
    headers: {
      "Content-Type": input.contentType,
      "Content-Length": file.length.toString()
    },
    signal: AbortSignal.timeout(3e4),
    body: new Uint8Array(file)
  });
  if (!response.ok) {
    throw new Error(
      `Failed to upload file to ${input.url}: ${response.status} ${response.statusText}`
    );
  }
}
// src/util/chunk.ts
// Splits `collection` into consecutive slices of at most `size` elements.
var chunk = (collection, size) => {
  const groups = [];
  for (let start = 0; start < collection.length; start += size) {
    groups.push(collection.slice(start, start + size));
  }
  return groups;
};
// src/upload.ts
import {
getPlaywrightTracePath,
readMetadata
} from "@argos-ci/util";
// src/version.ts
import { readVersionFromPackage } from "@argos-ci/util";
import { createRequire } from "module";
// CJS-style require bound to this ESM module, used to resolve package.json.
var require2 = createRequire(import.meta.url);
// Returns the SDK identifier reported to the API, e.g. "@argos-ci/core@1.2.3".
async function getArgosCoreSDKIdentifier() {
  const pkgPath = require2.resolve("@argos-ci/core/package.json");
  const version = await readVersionFromPackage(pkgPath);
  return `@argos-ci/core@${version}`;
}
// src/mime-type.ts
import mime from "mime-types";
// MIME type of a snapshot file inferred from its extension;
// throws when the type cannot be determined.
function getSnapshotMimeType(filepath) {
  const detected = mime.lookup(filepath);
  if (detected) {
    return detected;
  }
  throw new Error(`Unable to determine snapshot file type for: ${filepath}`);
}
// src/skip.ts
import { createClient as createClient2, throwAPIError as throwAPIError2 } from "@argos-ci/api-client";
// Creates a build marked as skipped (no screenshots), so a required status
// check can still complete when ARGOS_SKIPPED is set. Returns { build }.
async function skip(params) {
  const [config, argosSdk] = await Promise.all([
    getConfigFromOptions(params),
    getArgosCoreSDKIdentifier()
  ]);
  const authToken = getAuthToken(config);
  const apiClient = createClient2({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  const createBuildResponse = await apiClient.POST("/builds", {
    body: {
      commit: config.commit,
      branch: config.branch,
      name: config.buildName,
      mode: config.mode,
      prNumber: config.prNumber,
      prHeadCommit: config.prHeadCommit,
      referenceBranch: config.referenceBranch,
      referenceCommit: config.referenceCommit,
      argosSdk,
      ciProvider: config.ciProvider,
      runId: config.runId,
      runAttempt: config.runAttempt,
      // Skipped builds carry no content at all.
      skipped: true,
      screenshotKeys: [],
      pwTraceKeys: [],
      parentCommits: []
    }
  });
  if (createBuildResponse.error) {
    throwAPIError2(createBuildResponse.error);
  }
  return { build: createBuildResponse.data.build };
}
// src/upload.ts
// Batch size for chunked uploads — presumably consumed by the upload loop
// below the visible range; TODO confirm usage.
var CHUNK_SIZE = 10;
// Main upload entry point: resolves config, discovers snapshot files,
// creates a build on the Argos API, uploads screenshots (and Playwright
// traces) to S3 via pre-signed URLs, then finalizes the build with
// per-screenshot metadata. Returns `{ build, screenshots }`.
async function upload(params) {
debug("Starting upload with params", params);
// Config resolution and SDK identifier are independent — run in parallel.
const [config, argosSdk] = await Promise.all([
getConfigFromOptions(params),
getArgosCoreSDKIdentifier()
]);
const authToken = getAuthToken(config);
const apiClient = createClient3({
baseUrl: config.apiBaseUrl,
authToken
});
// Skipped builds upload nothing; delegate to skip(), which creates an
// empty build flagged as skipped server-side.
if (config.skipped) {
const { build } = await skip(params);
return { build, screenshots: [] };
}
// An explicit `previewUrl` option wins; otherwise fall back to a base-URL
// formatter derived from config, or leave preview formatting disabled.
const previewUrlFormatter = params.previewUrl ?? (config.previewBaseUrl ? { baseUrl: config.previewBaseUrl } : void 0);
// Default glob matches common image formats when no file list is given.
const globs = params.files ?? ["**/*.{png,jpg,jpeg}"];
debug("Using config and files", config, globs);
const files = await discoverSnapshots(globs, {
root: params.root,
ignore: params.ignore
});
debug("Found snapshots", files);
// For each discovered snapshot: read its metadata, look for an associated
// Playwright trace, optimize images, and hash the payloads to be uploaded
// (the hash later doubles as the server-side key for the file).
const snapshots = await Promise.all(
files.map(async (snapshot) => {
const contentType = getSnapshotMimeType(snapshot.path);
const [metadata, pwTracePath, optimizedPath] = await Promise.all([
readMetadata(snapshot.path),
getPlaywrightTracePath(snapshot.path),
// Only images go through optimization; other types upload as-is.
contentType.startsWith("image/") ? optimizeScreenshot(snapshot.path) : snapshot.path
]);
const [hash, pwTraceHash] = await Promise.all([
hashFile(optimizedPath),
pwTracePath ? hashFile(pwTracePath) : null
]);
// The `transient` metadata section is read locally (threshold, baseName,
// parentName) and removed before metadata is sent to the API.
const threshold = metadata?.transient?.threshold ?? null;
const baseName = metadata?.transient?.baseName ?? null;
const parentName = metadata?.transient?.parentName ?? null;
if (metadata) {
delete metadata.transient;
if (metadata.url && previewUrlFormatter) {
metadata.previewUrl = formatPreviewUrl(
metadata.url,
previewUrlFormatter
);
}
}
return {
...snapshot,
hash,
optimizedPath,
metadata,
threshold,
baseName,
parentName,
pwTrace: pwTracePath && pwTraceHash ? { path: pwTracePath, hash: pwTraceHash } : null,
contentType
};
})
);
debug("Fetch project");
const projectResponse = await apiClient.GET("/project");
if (projectResponse.error) {
throwAPIError3(projectResponse.error);
}
debug("Project fetched", projectResponse.data);
const { defaultBaseBranch, hasRemoteContentAccess } = projectResponse.data;
// Resolve the reference commit locally (via git merge-base) only when the
// server cannot read the repository itself (`hasRemoteContentAccess` is
// false); an explicit config value always takes precedence.
const referenceCommit = (() => {
if (config.referenceCommit) {
debug("Found reference commit in config", config.referenceCommit);
return config.referenceCommit;
}
if (hasRemoteContentAccess) {
return null;
}
const base = config.referenceBranch || config.prBaseBranch || defaultBaseBranch;
const sha = getMergeBaseCommitSha2({ base, head: config.branch });
if (sha) {
debug("Found merge base", sha);
} else {
debug("No merge base found");
}
return sha;
})();
// Parent commits are likewise only computed locally, and only when a
// reference commit was resolved above.
const parentCommits = (() => {
if (hasRemoteContentAccess) {
return null;
}
if (referenceCommit) {
const commits = listParentCommits2({ sha: referenceCommit });
if (commits) {
debug("Found parent commits", commits);
} else {
debug("No parent commits found");
}
return commits;
}
return null;
})();
debug("Creating build");
// Collect the deduplicated hashes of screenshots and traces; the server
// answers the build creation with pre-signed upload URLs keyed by them.
const [pwTraceKeys, snapshotKeys] = snapshots.reduce(
([pwTraceKeys2, snapshotKeys2], snapshot) => {
if (snapshot.pwTrace && !pwTraceKeys2.includes(snapshot.pwTrace.hash)) {
pwTraceKeys2.push(snapshot.pwTrace.hash);
}
if (!snapshotKeys2.includes(snapshot.hash)) {
snapshotKeys2.push(snapshot.hash);
}
return [pwTraceKeys2, snapshotKeys2];
},
[[], []]
);
const createBuildResponse = await apiClient.POST("/builds", {
body: {
commit: config.commit,
branch: config.branch,
name: config.buildName,
mode: config.mode,
parallel: config.parallel,
parallelNonce: config.parallelNonce,
screenshotKeys: snapshotKeys,
pwTraceKeys,
prNumber: config.prNumber,
prHeadCommit: config.prHeadCommit,
referenceBranch: config.referenceBranch,
referenceCommit,
parentCommits,
argosSdk,
ciProvider: config.ciProvider,
runId: config.runId,
runAttempt: config.runAttempt,
mergeQueue: config.mergeQueue,
subset: config.subset
}
});
if (createBuildResponse.error) {
throwAPIError3(createBuildResponse.error);
}
const result = createBuildResponse.data;
debug("Got uploads url", result);
// Pair each pre-signed URL returned by the server back with the local file
// carrying the matching hash; a missing match is an invariant violation.
const uploadFiles = [
...result.screenshots.map(({ key, putUrl }) => {
const snapshot = snapshots.find((s) => s.hash === key);
if (!snapshot) {
throw new Error(`Invariant: snapshot with hash ${key} not found`);
}
return {
url: putUrl,
path: snapshot.optimizedPath,
contentType: snapshot.contentType
};
}),
...result.pwTraces?.map(({ key, putUrl }) => {
const snapshot = snapshots.find(
(s) => s.pwTrace && s.pwTrace.hash === key
);
if (!snapshot || !snapshot.pwTrace) {
throw new Error(`Invariant: trace with ${key} not found`);
}
return {
url: putUrl,
path: snapshot.pwTrace.path,
contentType: "application/json"
};
}) ?? []
];
await uploadFilesToS3(uploadFiles);
debug("Updating build");
// Finalize the build: attach per-screenshot metadata and parallel-mode
// bookkeeping to the created build.
const uploadBuildResponse = await apiClient.PUT("/builds/{buildId}", {
params: {
path: {
buildId: result.build.id
}
},
body: {
screenshots: snapshots.map((snapshot) => ({
key: snapshot.hash,
name: snapshot.name,
metadata: snapshot.metadata,
pwTraceKey: snapshot.pwTrace?.hash ?? null,
// Per-screenshot threshold wins over the build-wide config value.
threshold: snapshot.threshold ?? config?.threshold ?? null,
baseName: snapshot.baseName,
parentName: snapshot.parentName,
contentType: snapshot.contentType
})),
parallel: config.parallel,
parallelTotal: config.parallelTotal,
parallelIndex: config.parallelIndex,
metadata: params.metadata
}
});
if (uploadBuildResponse.error) {
throwAPIError3(uploadBuildResponse.error);
}
return { build: uploadBuildResponse.data.build, screenshots: snapshots };
}
// Uploads files to their pre-signed URLs in sequential batches of
// CHUNK_SIZE; within a batch, all uploads run concurrently.
async function uploadFilesToS3(files) {
  debug(`Split files in chunks of ${CHUNK_SIZE}`);
  const batches = chunk(files, CHUNK_SIZE);
  debug(`Starting upload of ${batches.length} chunks`);
  let index = 0;
  for (const batch of batches) {
    index += 1;
    debug(`Uploading chunk ${index}/${batches.length}`);
    const timeLabel = `Chunk ${index}/${batches.length}`;
    debugTime(timeLabel);
    if (!batch) {
      throw new Error(`Invariant: chunk ${index - 1} is empty`);
    }
    await Promise.all(
      batch.map(({ url, path, contentType }) =>
        uploadFile({
          url,
          path,
          contentType
        })
      )
    );
    debugTimeEnd(timeLabel);
  }
}
// Formats a captured page URL into a preview URL. A function formatter is
// applied directly; an object formatter rebases the URL's path, query and
// hash onto `formatter.baseUrl`.
function formatPreviewUrl(url, formatter) {
  if (typeof formatter === "function") {
    return formatter(url);
  }
  const { pathname, search, hash } = new URL(url);
  return new URL(`${pathname}${search}${hash}`, formatter.baseUrl).href;
}
export {
finalize,
getConfigFromOptions,
readConfig,
skip,
upload
};