@argos-ci/core
Version:
Node.js SDK for visual testing with Argos.
1,127 lines (1,098 loc) • 31.5 kB
JavaScript
// src/upload.ts
import { createClient, throwAPIError } from "@argos-ci/api-client";
// src/config.ts
import convict from "convict";
// src/ci-environment/git.ts
import { execSync } from "node:child_process";
// src/debug.ts
import createDebug from "debug";
// src/debug.ts
var KEY = "@argos-ci/core";
var debug = createDebug(KEY);
var isDebugEnabled = createDebug.enabled(KEY);
// Start a console timer, but only when debug logging is enabled for KEY.
var debugTime = (label) => {
  if (!isDebugEnabled) return;
  console.time(label);
};
// Stop a previously started console timer (no-op unless debugging is on).
var debugTimeEnd = (label) => {
  if (!isDebugEnabled) return;
  console.timeEnd(label);
};
// src/ci-environment/git.ts
// Returns true when the current working directory is inside a git work tree.
function checkIsGitRepository() {
  try {
    const output = execSync("git rev-parse --is-inside-work-tree").toString();
    return output.trim() === "true";
  } catch {
    // git missing or not a repository — treat as "not a git repo".
    return false;
  }
}
// Resolve the SHA of the current HEAD commit, or null when git fails.
function head() {
  try {
    const sha = execSync("git rev-parse HEAD").toString();
    return sha.trim();
  } catch {
    return null;
  }
}
// Resolve the current branch name; null on detached HEAD or git failure.
function branch() {
  try {
    const ref = execSync("git rev-parse --abbrev-ref HEAD").toString().trim();
    // git prints the literal "HEAD" when in detached-HEAD state.
    return ref === "HEAD" ? null : ref;
  } catch {
    return null;
  }
}
// Fetch both refs up to `depth` commits deep, then ask git for their merge
// base. Returns null when git reports no common ancestor at this depth.
function getMergeBaseCommitShaWithDepth(input) {
  const { depth, head: headRef, base: baseRef } = input;
  // Fetch head first, then base, matching the original call order.
  for (const ref of [headRef, baseRef]) {
    execSync(
      `git fetch --update-head-ok --depth ${depth} origin ${ref}:${ref}`
    );
  }
  const mergeBase = execSync(`git merge-base ${headRef} ${baseRef}`).toString().trim();
  return mergeBase || null;
}
// Find the merge-base commit between input.head and input.base by fetching
// progressively deeper history until git can compute a common ancestor.
// Returns the merge-base SHA, or null when none is found.
function getMergeBaseCommitSha(input) {
  let depth = 200;
  // Depths actually attempted: 200, 400, 600, 800 (loop exits before 1000).
  while (depth < 1e3) {
    const mergeBase = getMergeBaseCommitShaWithDepth({
      depth,
      ...input
    });
    if (mergeBase) {
      return mergeBase;
    }
    depth += 200;
  }
  if (isDebugEnabled) {
    const headShas = listShas(input.head);
    const baseShas = listShas(input.base);
    // NOTE(review): `depth` is 1000 here, but the deepest fetch attempted
    // was 800 — the logged depth slightly overstates the search.
    debug(
      `No merge base found for ${input.head} and ${input.base} with depth ${depth}`
    );
    debug(
      `Found ${headShas.length} commits in ${input.head}: ${headShas.join(", ")}`
    );
    debug(
      `Found ${baseShas.length} commits in ${input.base}: ${baseShas.join(", ")}`
    );
  }
  return null;
}
// List commit SHAs reachable from `path` (a ref or rev), one per line from
// `git log`, optionally capped at `maxCount` entries.
function listShas(path, maxCount) {
  const limitFlag = maxCount ? `--max-count=${maxCount}` : "";
  const command = `git log --format="%H" ${limitFlag} ${path}`.trim();
  return execSync(command).toString().trim().split("\n");
}
// Fetch up to 200 ancestors of the given sha, then list their SHAs.
function listParentCommits(input) {
  const maxCommits = 200;
  execSync(`git fetch --depth=${maxCommits} origin ${input.sha}`);
  return listShas(input.sha, maxCommits);
}
// src/ci-environment/services/bitrise.ts
// Parse the Bitrise pull request number from the environment, if any.
var getPrNumber = ({ env }) => {
  const raw = env.BITRISE_PULL_REQUEST;
  return raw ? Number(raw) : null;
};
// CI service descriptor for Bitrise, detected via the BITRISE_IO env var.
var service = {
  name: "Bitrise",
  key: "bitrise",
  detect: ({ env }) => Boolean(env.BITRISE_IO),
  // Pure mapping from Bitrise environment variables to the common shape.
  config: ({ env }) => ({
    commit: env.BITRISE_GIT_COMMIT || null,
    branch: env.BITRISE_GIT_BRANCH || null,
    owner: env.BITRISEIO_GIT_REPOSITORY_OWNER || null,
    repository: env.BITRISEIO_GIT_REPOSITORY_SLUG || null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: getPrNumber({ env }),
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: env.BITRISEIO_PIPELINE_ID || null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var bitrise_default = service;
// src/ci-environment/services/buildkite.ts
// CI service descriptor for Buildkite, detected via the BUILDKITE env var.
var service2 = {
  name: "Buildkite",
  key: "buildkite",
  detect: ({ env }) => Boolean(env.BUILDKITE),
  config: ({ env }) => ({
    // Buildkite env vars are unreliable here, so fall back to asking git
    // directly for the commit and branch.
    commit: env.BUILDKITE_COMMIT || head() || null,
    branch: env.BUILDKITE_BRANCH || branch() || null,
    owner: env.BUILDKITE_ORGANIZATION_SLUG || null,
    repository: env.BUILDKITE_PROJECT_SLUG || null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: env.BUILDKITE_PULL_REQUEST ? Number(env.BUILDKITE_PULL_REQUEST) : null,
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: env.BUILDKITE_BUILD_ID || null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var buildkite_default = service2;
// src/ci-environment/services/heroku.ts
// CI service descriptor for Heroku CI, detected via HEROKU_TEST_RUN_ID.
var service3 = {
  name: "Heroku",
  key: "heroku",
  detect: ({ env }) => Boolean(env.HEROKU_TEST_RUN_ID),
  config: ({ env }) => {
    return {
      commit: env.HEROKU_TEST_RUN_COMMIT_VERSION || null,
      branch: env.HEROKU_TEST_RUN_BRANCH || null,
      owner: null,
      repository: null,
      jobId: null,
      runId: null,
      runAttempt: null,
      prNumber: null,
      prHeadCommit: null,
      prBaseBranch: null,
      nonce: env.HEROKU_TEST_RUN_ID || null
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var heroku_default = service3;
// src/ci-environment/services/github-actions.ts
import { existsSync, readFileSync } from "node:fs";
import axios from "axios";
// Look up the open pull request whose head commit is `sha` via the GitHub
// REST API. Returns the matching PR object, or null when no token is
// configured, no PR matches, or the request fails (best-effort lookup).
// Only the 30 most recently updated open PRs are inspected.
async function getPullRequestFromHeadSha({ env }, sha) {
  debug("Fetching pull request number from head sha", sha);
  if (!env.GITHUB_REPOSITORY) {
    throw new Error("GITHUB_REPOSITORY is missing");
  }
  if (!env.GITHUB_TOKEN) {
    if (!env.DISABLE_GITHUB_TOKEN_WARNING) {
      console.log(
        `
Argos couldn\u2019t find a relevant pull request in the current environment.
To resolve this, Argos requires a GITHUB_TOKEN to fetch the pull request associated with the head SHA. Please ensure the following environment variable is added:
GITHUB_TOKEN: \${{ secrets.GITHUB_TOKEN }}
For more details, check out the documentation: Read more at https://argos-ci.com/docs/run-on-preview-deployment
If you want to disable this warning, you can set the following environment variable:
DISABLE_GITHUB_TOKEN_WARNING: true
`.trim()
      );
    }
    return null;
  }
  try {
    const result = await axios.get(
      `https://api.github.com/repos/${env.GITHUB_REPOSITORY}/pulls`,
      {
        params: {
          state: "open",
          sort: "updated",
          per_page: 30,
          page: 1
        },
        headers: {
          Accept: "application/vnd.github+json",
          // Fix: use the token from the provided context env (was
          // `process.env.GITHUB_TOKEN`), consistent with the guard above.
          Authorization: `Bearer ${env.GITHUB_TOKEN}`,
          "X-GitHub-Api-Version": "2022-11-28"
        }
      }
    );
    if (result.data.length === 0) {
      debug("Aborting because no pull request found");
      return null;
    }
    const matchingPr = result.data.find((pr) => pr.head.sha === sha);
    if (matchingPr) {
      debug("Pull request found", matchingPr);
      return matchingPr;
    }
    debug("Aborting because no pull request found");
    return null;
  } catch (error) {
    // Best-effort: a failed API call must not break the build upload.
    debug("Error while fetching pull request from head sha", error);
    return null;
  }
}
// Derive the branch name from GitHub Actions environment variables.
// GITHUB_HEAD_REF is set for pull_request events; otherwise parse the
// `refs/heads/<branch>` form of GITHUB_REF.
function getBranchFromContext(context) {
  const { env } = context;
  if (env.GITHUB_HEAD_REF) {
    return env.GITHUB_HEAD_REF;
  }
  const ref = env.GITHUB_REF;
  if (!ref) {
    return null;
  }
  const match = /refs\/heads\/(.*)/.exec(ref);
  return match?.[1] ?? null;
}
// Extract a branch name from a GitHub webhook event payload when possible.
// Deployment events carry the environment name in place of a branch.
function getBranchFromPayload(payload) {
  const { workflow_run: workflowRun, deployment } = payload;
  if (workflowRun) return workflowRun.head_branch;
  if (deployment) return deployment.environment;
  return null;
}
// Repository name (without the owner) from GITHUB_REPOSITORY ("owner/repo").
function getRepositoryFromContext({ env }) {
  const slug = env.GITHUB_REPOSITORY;
  if (!slug) return null;
  const [, repo] = slug.split("/");
  return repo || null;
}
// Load and JSON-parse the GitHub Actions event payload file, if one exists.
// Returns null when GITHUB_EVENT_PATH is unset or points nowhere.
function readEventPayload({ env }) {
  const eventPath = env.GITHUB_EVENT_PATH;
  if (!eventPath || !existsSync(eventPath)) {
    return null;
  }
  return JSON.parse(readFileSync(eventPath, "utf-8"));
}
// Pull the first pull request object out of a webhook payload, checking the
// event shapes that can carry one (pull_request, workflow_run, check_run).
function getPullRequestFromPayload(payload) {
  if ("pull_request" in payload && payload.pull_request) {
    return payload.pull_request;
  }
  if ("workflow_run" in payload && payload.workflow_run) {
    const [pr] = payload.workflow_run.pull_requests;
    if (pr) return pr;
  }
  if ("check_run" in payload && payload.check_run && "pull_requests" in payload.check_run) {
    const [pr] = payload.check_run.pull_requests;
    if (pr) return pr;
  }
  return null;
}
// CI service descriptor for GitHub Actions. `config` resolves the pull
// request from the event payload when one exists, falling back to a GitHub
// API lookup keyed by the head SHA.
var service4 = {
  name: "GitHub Actions",
  key: "github-actions",
  detect: (context) => Boolean(context.env.GITHUB_ACTIONS),
  config: async (context) => {
    const { env } = context;
    const payload = readEventPayload(context);
    // Fix: read GITHUB_SHA from the context env (was `process.env`) so the
    // function honors whatever context the caller supplies. At runtime the
    // context env IS process.env (see createContext), so behavior is
    // unchanged for existing callers.
    const sha = env.GITHUB_SHA || null;
    if (!sha) {
      throw new Error(`GITHUB_SHA is missing`);
    }
    const pullRequest = payload ? getPullRequestFromPayload(payload) : await getPullRequestFromHeadSha(context, sha);
    return {
      commit: sha,
      owner: env.GITHUB_REPOSITORY_OWNER || null,
      repository: getRepositoryFromContext(context),
      jobId: env.GITHUB_JOB || null,
      runId: env.GITHUB_RUN_ID || null,
      runAttempt: env.GITHUB_RUN_ATTEMPT ? Number(env.GITHUB_RUN_ATTEMPT) : null,
      // Identifies this specific workflow run attempt.
      nonce: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`,
      branch: getBranchFromContext(context) || pullRequest?.head.ref || (payload ? getBranchFromPayload(payload) : null) || null,
      prNumber: pullRequest?.number || null,
      prHeadCommit: pullRequest?.head.sha ?? null,
      prBaseBranch: pullRequest?.base.ref ?? null
    };
  },
  getMergeBaseCommitSha,
  listParentCommits
};
var github_actions_default = service4;
// src/ci-environment/services/circleci.ts
// Extract the PR number from CIRCLE_PULL_REQUEST (a URL ending in /pull/<n>).
var getPrNumber2 = ({ env }) => {
  const match = /pull\/(\d+)/.exec(env.CIRCLE_PULL_REQUEST || "");
  return match ? Number(match[1]) : null;
};
// CI service descriptor for CircleCI, detected via the CIRCLECI env var.
var service5 = {
  name: "CircleCI",
  key: "circleci",
  detect: ({ env }) => Boolean(env.CIRCLECI),
  // Pure mapping from CircleCI environment variables to the common shape.
  config: ({ env }) => ({
    commit: env.CIRCLE_SHA1 || null,
    branch: env.CIRCLE_BRANCH || null,
    owner: env.CIRCLE_PROJECT_USERNAME || null,
    repository: env.CIRCLE_PROJECT_REPONAME || null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: getPrNumber2({ env }),
    prHeadCommit: null,
    prBaseBranch: null,
    // Workflow id first, falling back to the build number.
    nonce: env.CIRCLE_WORKFLOW_ID || env.CIRCLE_BUILD_NUM || null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var circleci_default = service5;
// src/ci-environment/services/travis.ts
// Owner part of TRAVIS_REPO_SLUG ("owner/repo"), or null when unavailable.
var getOwner = ({ env }) => {
  const slug = env.TRAVIS_REPO_SLUG;
  return slug ? slug.split("/")[0] || null : null;
};
// Repository part of TRAVIS_REPO_SLUG ("owner/repo"), or null when unavailable.
var getRepository = ({ env }) => {
  const slug = env.TRAVIS_REPO_SLUG;
  return slug ? slug.split("/")[1] || null : null;
};
// Parse the Travis pull request number. Travis sets TRAVIS_PULL_REQUEST to
// the literal string "false" for non-PR builds, which must map to null —
// previously it produced NaN via Number("false").
var getPrNumber3 = ({ env }) => {
  const raw = env.TRAVIS_PULL_REQUEST;
  if (!raw || raw === "false") {
    return null;
  }
  const num = Number(raw);
  // Guard against any other non-numeric value leaking NaN downstream.
  return Number.isNaN(num) ? null : num;
};
// CI service descriptor for Travis CI, detected via the TRAVIS env var.
var service6 = {
  name: "Travis CI",
  key: "travis",
  detect: ({ env }) => Boolean(env.TRAVIS),
  config: (ctx) => ({
    commit: ctx.env.TRAVIS_COMMIT || null,
    branch: ctx.env.TRAVIS_BRANCH || null,
    owner: getOwner(ctx),
    repository: getRepository(ctx),
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: getPrNumber3(ctx),
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: ctx.env.TRAVIS_BUILD_ID || null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var travis_default = service6;
// src/ci-environment/services/gitlab.ts
// CI service descriptor for GitLab CI, detected via GITLAB_CI === "true".
var service7 = {
  name: "GitLab",
  key: "gitlab",
  detect: ({ env }) => env.GITLAB_CI === "true",
  config: ({ env }) => ({
    commit: env.CI_COMMIT_SHA || null,
    branch: env.CI_COMMIT_REF_NAME || null,
    owner: null,
    repository: null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: null,
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: env.CI_PIPELINE_ID || null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var gitlab_default = service7;
// src/ci-environment/services/git.ts
// Fallback "service": plain git repository with no recognized CI provider.
// Detection shells out to git; commit and branch come from the local repo.
var service8 = {
  name: "Git",
  key: "git",
  detect: () => checkIsGitRepository(),
  config: () => ({
    commit: head() || null,
    branch: branch() || null,
    owner: null,
    repository: null,
    jobId: null,
    runId: null,
    runAttempt: null,
    prNumber: null,
    prHeadCommit: null,
    prBaseBranch: null,
    nonce: null
  }),
  getMergeBaseCommitSha,
  listParentCommits
};
var git_default = service8;
// src/ci-environment/index.ts
// Ordered list of supported CI services; the first whose detect() matches
// wins, with plain git (`git_default`) as the last-resort fallback.
var services = [
  heroku_default,
  github_actions_default,
  circleci_default,
  travis_default,
  buildkite_default,
  gitlab_default,
  bitrise_default,
  git_default
];
// Build the detection context from the current process environment.
function createContext() {
  return { env: process.env };
}
// Find the first service matching the given context, if any.
function getCiService(context) {
  return services.find((candidate) => candidate.detect(context));
}
// Resolve the merge-base commit via the detected CI service, or null when
// no service matches the current environment.
function getMergeBaseCommitSha2(input) {
  const context = createContext();
  const ciService = getCiService(context);
  return ciService ? ciService.getMergeBaseCommitSha(input, context) : null;
}
// List parent commits via the detected CI service, or null when no service
// matches the current environment.
function listParentCommits2(input) {
  const context = createContext();
  const ciService = getCiService(context);
  return ciService ? ciService.listParentCommits(input, context) : null;
}
// Detect the current CI environment and return its normalized variables
// (plus the service name/key), or null when no known service matches.
async function getCiEnvironment() {
  const context = createContext();
  debug("Detecting CI environment", context);
  const ciService = getCiService(context);
  if (!ciService) {
    return null;
  }
  debug("Internal service matched", ciService.name);
  const variables = await ciService.config(context);
  const ciEnvironment = {
    name: ciService.name,
    key: ciService.key,
    ...variables
  };
  debug("CI environment", ciEnvironment);
  return ciEnvironment;
}
// src/config.ts
// Convict format validator: the value must look like an HTTP(S) URL.
var mustBeApiBaseUrl = (value) => {
  const looksLikeUrl = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*)/.test(value);
  if (!looksLikeUrl) {
    throw new Error("Invalid Argos API base URL");
  }
};
// Convict format validator: the value must be a full 40-character lowercase
// hex SHA1. A 7-character short SHA gets a dedicated error message.
var mustBeCommit = (value) => {
  if (/^[0-9a-f]{40}$/.test(value)) {
    return;
  }
  if (/^[0-9a-f]{7}$/.test(value)) {
    throw new Error("Short SHA1 is not allowed");
  }
  throw new Error("Invalid commit");
};
// Convict format validator: tokens, when present, are exactly 40 characters.
// Null/empty values pass — the token is optional (tokenless CI auth exists).
var mustBeArgosToken = (value) => {
  if (!value) return;
  if (value.length !== 40) {
    throw new Error("Invalid Argos repository token (must be 40 characters)");
  }
};
// Register the custom convict format used by the `threshold` option:
// a float between 0 and 1 inclusive, coerced from its string env value.
convict.addFormat({
  name: "float-percent",
  validate: function(val) {
    // Accept 0 explicitly (it is falsy); reject missing or out-of-range values.
    if (val !== 0 && (!val || val > 1 || val < 0)) {
      throw new Error("Must be a float between 0 and 1, inclusive.");
    }
  },
  coerce: function(val) {
    return parseFloat(val);
  }
});
// Convict schema describing every configuration option. Options with an
// `env` key can be supplied through environment variables; the remaining
// options are filled from the detected CI environment in readConfig().
var schema = {
  apiBaseUrl: {
    env: "ARGOS_API_BASE_URL",
    default: "https://api.argos-ci.com/v2/",
    format: mustBeApiBaseUrl
  },
  commit: {
    env: "ARGOS_COMMIT",
    default: null,
    format: mustBeCommit
  },
  branch: {
    env: "ARGOS_BRANCH",
    default: null,
    format: String
  },
  token: {
    env: "ARGOS_TOKEN",
    default: null,
    format: mustBeArgosToken
  },
  buildName: {
    env: "ARGOS_BUILD_NAME",
    default: null,
    format: String,
    nullable: true
  },
  mode: {
    env: "ARGOS_MODE",
    format: ["ci", "monitoring"],
    default: null,
    nullable: true
  },
  // Pull-request related overrides.
  prNumber: {
    env: "ARGOS_PR_NUMBER",
    format: Number,
    default: null,
    nullable: true
  },
  prHeadCommit: {
    env: "ARGOS_PR_HEAD_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  prBaseBranch: {
    env: "ARGOS_PR_BASE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  // Parallel (sharded) build settings.
  parallel: {
    env: "ARGOS_PARALLEL",
    default: false,
    format: Boolean
  },
  parallelNonce: {
    env: "ARGOS_PARALLEL_NONCE",
    format: String,
    default: null,
    nullable: true
  },
  parallelIndex: {
    env: "ARGOS_PARALLEL_INDEX",
    format: "nat",
    default: null,
    nullable: true
  },
  parallelTotal: {
    env: "ARGOS_PARALLEL_TOTAL",
    format: "int",
    default: null,
    nullable: true
  },
  referenceBranch: {
    env: "ARGOS_REFERENCE_BRANCH",
    format: String,
    default: null,
    nullable: true
  },
  referenceCommit: {
    env: "ARGOS_REFERENCE_COMMIT",
    format: String,
    default: null,
    nullable: true
  },
  // CI metadata filled from the detected environment (no env override).
  jobId: {
    format: String,
    default: null,
    nullable: true
  },
  runId: {
    format: String,
    default: null,
    nullable: true
  },
  runAttempt: {
    format: "nat",
    default: null,
    nullable: true
  },
  owner: {
    format: String,
    default: null,
    nullable: true
  },
  repository: {
    format: String,
    default: null,
    nullable: true
  },
  ciProvider: {
    format: String,
    default: null,
    nullable: true
  },
  // Uses the custom "float-percent" convict format (0..1 inclusive).
  threshold: {
    env: "ARGOS_THRESHOLD",
    format: "float-percent",
    default: null,
    nullable: true
  },
  previewBaseUrl: {
    env: "ARGOS_PREVIEW_BASE_URL",
    format: String,
    default: null,
    nullable: true
  }
};
// Build a fresh convict config instance; CLI args are intentionally ignored.
var createConfig = () => convict(schema, { args: [] });
// Build the final configuration by merging, in order of precedence:
// explicit `options` > ARGOS_* environment variables (already loaded into
// the convict instance) > values detected from the CI environment.
// Validates the result and returns it as a plain object.
async function readConfig(options = {}) {
  const config = createConfig();
  const ciEnv = await getCiEnvironment();
  config.load({
    apiBaseUrl: options.apiBaseUrl || config.get("apiBaseUrl"),
    commit: options.commit || config.get("commit") || ciEnv?.commit || null,
    branch: options.branch || config.get("branch") || ciEnv?.branch || null,
    token: options.token || config.get("token") || null,
    buildName: options.buildName || config.get("buildName") || null,
    prNumber: options.prNumber || config.get("prNumber") || ciEnv?.prNumber || null,
    prHeadCommit: config.get("prHeadCommit") || ciEnv?.prHeadCommit || null,
    prBaseBranch: config.get("prBaseBranch") || ciEnv?.prBaseBranch || null,
    referenceBranch: options.referenceBranch || config.get("referenceBranch") || null,
    referenceCommit: options.referenceCommit || config.get("referenceCommit") || null,
    // CI-only metadata: never overridable through options.
    owner: ciEnv?.owner || null,
    repository: ciEnv?.repository || null,
    jobId: ciEnv?.jobId || null,
    runId: ciEnv?.runId || null,
    runAttempt: ciEnv?.runAttempt || null,
    // `??` (not `||`) so explicit false/0 option values are respected.
    parallel: options.parallel ?? config.get("parallel") ?? false,
    parallelNonce: options.parallelNonce || config.get("parallelNonce") || ciEnv?.nonce || null,
    parallelTotal: options.parallelTotal ?? config.get("parallelTotal") ?? null,
    parallelIndex: options.parallelIndex ?? config.get("parallelIndex") ?? null,
    mode: options.mode || config.get("mode") || null,
    ciProvider: ciEnv?.key || null,
    previewBaseUrl: config.get("previewBaseUrl") || null
  });
  config.validate();
  return config.get();
}
// src/discovery.ts
import { resolve } from "node:path";
import glob from "fast-glob";
// Find screenshot files matching `patterns` under `root`, returning
// { name, path } pairs where `name` is the root-relative match and `path`
// is the absolute location on disk.
var discoverScreenshots = async (patterns, { root = process.cwd(), ignore } = {}) => {
  const printable = Array.isArray(patterns) ? patterns.join(", ") : patterns;
  debug(`Discovering screenshots with patterns: ${printable} in ${root}`);
  const matches = await glob(patterns, { onlyFiles: true, ignore, cwd: root });
  return matches.map((name) => {
    debug(`Found screenshot: ${name}`);
    return { name, path: resolve(root, name) };
  });
};
// src/optimize.ts
import { promisify } from "node:util";
import { basename } from "node:path";
import sharp from "sharp";
import tmp from "tmp";
var tmpFile = promisify(tmp.file);
// Maximum total pixel count accepted before downscaling (8e7 = 80 MP).
var MAX_PIXELS = 8e7;
var DEFAULT_MAX_WIDTH = 2048;
// Re-encode a screenshot as PNG into a temporary file, downscaling it when
// its pixel count exceeds MAX_PIXELS. Returns the temp file path.
// Throws with the original error attached as `cause` on any sharp failure.
var optimizeScreenshot = async (filepath) => {
  try {
    const [resultFilePath, metadata] = await Promise.all([
      tmpFile(),
      sharp(filepath).metadata()
    ]);
    const { width, height } = metadata;
    // Compute the bounding box to resize into, or null when no resize
    // is needed (image already within MAX_PIXELS).
    const maxDimensions = (() => {
      if (!width || !height) {
        // Unknown dimensions: fall back to a conservative default box.
        return {
          width: DEFAULT_MAX_WIDTH,
          height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
        };
      }
      const nbPixels = width * height;
      if (nbPixels <= MAX_PIXELS) {
        return null;
      }
      // NOTE(review): portrait images get the same fixed default box as
      // unknown-size images, while landscape images are scaled
      // proportionally below — confirm this asymmetry is intentional.
      if (width < height) {
        return {
          width: DEFAULT_MAX_WIDTH,
          height: Math.floor(MAX_PIXELS / DEFAULT_MAX_WIDTH)
        };
      }
      const scaleFactor = Math.sqrt(MAX_PIXELS / nbPixels);
      return {
        width: Math.floor(width * scaleFactor),
        height: Math.floor(height * scaleFactor)
      };
    })();
    let operation = sharp(filepath);
    if (maxDimensions) {
      // "inside" keeps aspect ratio; withoutEnlargement never upscales.
      operation = operation.resize(maxDimensions.width, maxDimensions.height, {
        fit: "inside",
        withoutEnlargement: true
      });
    }
    await operation.png({ force: true }).toFile(resultFilePath);
    if (width && height && maxDimensions) {
      // Recompute the effective output size (mirrors sharp's "inside" fit)
      // purely for the warning message below.
      const { width: maxWidth, height: maxHeight } = maxDimensions;
      const widthRatio = maxWidth / width;
      const heightRatio = maxHeight / height;
      const scaleFactor = Math.min(widthRatio, heightRatio);
      const newWidth = Math.floor(width * scaleFactor);
      const newHeight = Math.floor(height * scaleFactor);
      console.warn(
        `Image ${basename(filepath)} resized from ${width}x${height} to ${newWidth}x${newHeight}.`
      );
    }
    return resultFilePath;
  } catch (error) {
    const message = error instanceof Error ? error.message : "Unknown Error";
    throw new Error(`Error while processing image (${filepath}): ${message}`, {
      cause: error
    });
  }
};
// src/hashing.ts
import { createReadStream } from "node:fs";
import { createHash } from "node:crypto";
// Stream a file through SHA-256 and return the hex digest.
var hashFile = async (filepath) => {
  const hash = createHash("sha256");
  const stream = createReadStream(filepath);
  await new Promise((resolvePromise, reject) => {
    stream.on("error", reject);
    hash.on("error", reject);
    // "finish" fires once the read stream has been fully piped in.
    hash.on("finish", resolvePromise);
    stream.pipe(hash);
  });
  return hash.digest("hex");
};
// src/auth.ts
// Serialize an object to base64-encoded JSON.
var base64Encode = (obj) => Buffer.from(JSON.stringify(obj), "utf8").toString("base64");
// Resolve the authentication token: an explicit token always wins; without
// one, GitHub Actions builds can authenticate "tokenless" using CI run
// coordinates. Any other provider without a token is an error.
function getAuthToken({
  token,
  ciProvider,
  owner,
  repository,
  jobId,
  runId,
  prNumber
}) {
  if (token) {
    return token;
  }
  if (ciProvider !== "github-actions") {
    throw new Error("Missing Argos repository token 'ARGOS_TOKEN'");
  }
  if (!owner || !repository || !jobId || !runId) {
    throw new Error(
      `Automatic GitHub Actions variables detection failed. Please add the 'ARGOS_TOKEN'`
    );
  }
  return `tokenless-github-${base64Encode({
    owner,
    repository,
    jobId,
    runId,
    prNumber
  })}`;
}
// src/s3.ts
import { readFile } from "node:fs/promises";
import axios2 from "axios";
// PUT a local file to a presigned URL with the given content type.
var upload = async (input) => {
  const data = await readFile(input.path);
  await axios2({
    method: "PUT",
    url: input.url,
    data,
    headers: {
      "Content-Type": input.contentType
    }
  });
};
// src/util/chunk.ts
// Split `collection` into consecutive slices of at most `size` elements.
// Throws on a non-positive or non-integer size — previously a size of 0
// made `Math.ceil(length / 0)` Infinity and the loop spun forever.
var chunk = (collection, size) => {
  if (!Number.isInteger(size) || size <= 0) {
    throw new Error(`Invalid chunk size: ${size}`);
  }
  const result = [];
  for (let start = 0; start < collection.length; start += size) {
    result.push(collection.slice(start, start + size));
  }
  return result;
};
// src/upload.ts
import { getPlaywrightTracePath, readMetadata } from "@argos-ci/util";
// src/version.ts
import { readVersionFromPackage } from "@argos-ci/util";
import { createRequire } from "node:module";
// CommonJS-style require, needed to resolve package.json from ESM.
var require2 = createRequire(import.meta.url);
// Build the SDK identifier string ("@argos-ci/core@<version>") reported to
// the Argos API, reading the version from the installed package.json.
async function getArgosCoreSDKIdentifier() {
  const pkgPath = require2.resolve("@argos-ci/core/package.json");
  const version = await readVersionFromPackage(pkgPath);
  return `@argos-ci/core@${version}`;
}
// src/upload.ts
// Number of files uploaded concurrently per batch.
var CHUNK_SIZE = 10;
// Translate the public upload options into readConfig() parameters,
// flattening the optional `parallel` descriptor into its three fields.
async function getConfigFromOptions({
  parallel,
  ...options
}) {
  const parallelOptions = parallel
    ? {
        parallelNonce: parallel.nonce,
        parallelTotal: parallel.total,
        parallelIndex: parallel.index
      }
    : { parallelNonce: null, parallelTotal: null, parallelIndex: null };
  return readConfig({
    ...options,
    parallel: Boolean(parallel),
    ...parallelOptions
  });
}
// Upload files sequentially in batches of CHUNK_SIZE; the files inside a
// batch are uploaded in parallel. Each batch is timed when debug is on.
async function uploadFilesToS3(files) {
  debug(`Split files in chunks of ${CHUNK_SIZE}`);
  const chunks = chunk(files, CHUNK_SIZE);
  debug(`Starting upload of ${chunks.length} chunks`);
  for (const [index, batch] of chunks.entries()) {
    debug(`Uploading chunk ${index + 1}/${chunks.length}`);
    const timeLabel = `Chunk ${index + 1}/${chunks.length}`;
    debugTime(timeLabel);
    if (!batch) {
      throw new Error(`Invariant: chunk ${index} is empty`);
    }
    await Promise.all(
      batch.map(({ url, path, contentType }) => upload({ url, path, contentType }))
    );
    debugTimeEnd(timeLabel);
  }
}
// Rewrite a screenshot's captured URL for preview display: either through a
// caller-provided function, or by re-rooting its path + query + hash onto
// the configured base URL.
function formatPreviewUrl(url, formatter) {
  if (typeof formatter === "function") {
    return formatter(url);
  }
  const { pathname, search, hash } = new URL(url);
  return new URL(pathname + search + hash, formatter.baseUrl).href;
}
// Main entry point: discover screenshots, optimize and hash them, create a
// build on the Argos API, upload the files to S3 via presigned URLs, then
// finalize the build's screenshot metadata. Returns the created build and
// the processed screenshots.
async function upload2(params) {
  debug("Starting upload with params", params);
  const [config, argosSdk] = await Promise.all([
    getConfigFromOptions(params),
    getArgosCoreSDKIdentifier()
  ]);
  // Explicit previewUrl option wins over the configured base URL.
  const previewUrlFormatter = params.previewUrl ?? (config.previewBaseUrl ? { baseUrl: config.previewBaseUrl } : void 0);
  const files = params.files ?? ["**/*.{png,jpg,jpeg}"];
  debug("Using config and files", config, files);
  const authToken = getAuthToken(config);
  const apiClient = createClient({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  const foundScreenshots = await discoverScreenshots(files, {
    root: params.root,
    ignore: params.ignore
  });
  debug("Found screenshots", foundScreenshots);
  // For each screenshot: read its sidecar metadata, locate any Playwright
  // trace, optimize the image, and hash the optimized file (and trace).
  const screenshots = await Promise.all(
    foundScreenshots.map(async (screenshot) => {
      const [metadata, pwTracePath, optimizedPath] = await Promise.all([
        readMetadata(screenshot.path),
        getPlaywrightTracePath(screenshot.path),
        optimizeScreenshot(screenshot.path)
      ]);
      const [hash, pwTraceHash] = await Promise.all([
        hashFile(optimizedPath),
        pwTracePath ? hashFile(pwTracePath) : null
      ]);
      // `transient` metadata is consumed locally and stripped before upload.
      const threshold = metadata?.transient?.threshold ?? null;
      const baseName = metadata?.transient?.baseName ?? null;
      if (metadata) {
        delete metadata.transient;
        if (metadata.url && previewUrlFormatter) {
          metadata.previewUrl = formatPreviewUrl(
            metadata.url,
            previewUrlFormatter
          );
        }
      }
      return {
        ...screenshot,
        hash,
        optimizedPath,
        metadata,
        threshold,
        baseName,
        pwTrace: pwTracePath && pwTraceHash ? { path: pwTracePath, hash: pwTraceHash } : null
      };
    })
  );
  debug("Fetch project");
  const projectResponse = await apiClient.GET("/project");
  if (projectResponse.error) {
    throwAPIError(projectResponse.error);
  }
  debug("Project fetched", projectResponse.data);
  const { defaultBaseBranch, hasRemoteContentAccess } = projectResponse.data;
  // Resolve the reference commit locally only when the API cannot access
  // the repository contents itself (hasRemoteContentAccess === false).
  const referenceCommit = (() => {
    if (config.referenceCommit) {
      debug("Found reference commit in config", config.referenceCommit);
      return config.referenceCommit;
    }
    if (hasRemoteContentAccess) {
      return null;
    }
    const base = config.referenceBranch || config.prBaseBranch || defaultBaseBranch;
    const sha = getMergeBaseCommitSha2({ base, head: config.branch });
    if (sha) {
      debug("Found merge base", sha);
    } else {
      debug("No merge base found");
    }
    return sha;
  })();
  // Parent commits are likewise only listed when the API lacks repo access.
  const parentCommits = (() => {
    if (hasRemoteContentAccess) {
      return null;
    }
    if (referenceCommit) {
      const commits = listParentCommits2({ sha: referenceCommit });
      if (commits) {
        debug("Found parent commits", commits);
      } else {
        debug("No parent commits found");
      }
      return commits;
    }
    return null;
  })();
  debug("Creating build");
  // Collect unique content hashes for screenshots and Playwright traces.
  const [pwTraceKeys, screenshotKeys] = screenshots.reduce(
    ([pwTraceKeys2, screenshotKeys2], screenshot) => {
      if (screenshot.pwTrace && !pwTraceKeys2.includes(screenshot.pwTrace.hash)) {
        pwTraceKeys2.push(screenshot.pwTrace.hash);
      }
      if (!screenshotKeys2.includes(screenshot.hash)) {
        screenshotKeys2.push(screenshot.hash);
      }
      return [pwTraceKeys2, screenshotKeys2];
    },
    [[], []]
  );
  const createBuildResponse = await apiClient.POST("/builds", {
    body: {
      commit: config.commit,
      branch: config.branch,
      name: config.buildName,
      mode: config.mode,
      parallel: config.parallel,
      parallelNonce: config.parallelNonce,
      screenshotKeys,
      pwTraceKeys,
      prNumber: config.prNumber,
      prHeadCommit: config.prHeadCommit,
      referenceBranch: config.referenceBranch,
      referenceCommit,
      parentCommits,
      argosSdk,
      ciProvider: config.ciProvider,
      runId: config.runId,
      runAttempt: config.runAttempt
    }
  });
  if (createBuildResponse.error) {
    throwAPIError(createBuildResponse.error);
  }
  const result = createBuildResponse.data;
  debug("Got uploads url", result);
  // The API returns presigned PUT URLs keyed by content hash; map them back
  // to local file paths for upload.
  const uploadFiles = [
    ...result.screenshots.map(({ key, putUrl }) => {
      const screenshot = screenshots.find((s) => s.hash === key);
      if (!screenshot) {
        throw new Error(`Invariant: screenshot with hash ${key} not found`);
      }
      return {
        url: putUrl,
        path: screenshot.optimizedPath,
        contentType: "image/png"
      };
    }),
    ...result.pwTraces?.map(({ key, putUrl }) => {
      const screenshot = screenshots.find(
        (s) => s.pwTrace && s.pwTrace.hash === key
      );
      if (!screenshot || !screenshot.pwTrace) {
        throw new Error(`Invariant: trace with ${key} not found`);
      }
      return {
        url: putUrl,
        path: screenshot.pwTrace.path,
        contentType: "application/json"
      };
    }) ?? []
  ];
  await uploadFilesToS3(uploadFiles);
  debug("Updating build");
  // Finalize: attach per-screenshot metadata and parallel-shard info.
  const uploadBuildResponse = await apiClient.PUT("/builds/{buildId}", {
    params: {
      path: {
        buildId: result.build.id
      }
    },
    body: {
      screenshots: screenshots.map((screenshot) => ({
        key: screenshot.hash,
        name: screenshot.name,
        metadata: screenshot.metadata,
        pwTraceKey: screenshot.pwTrace?.hash ?? null,
        // Per-screenshot threshold overrides the global one.
        threshold: screenshot.threshold ?? config?.threshold ?? null,
        baseName: screenshot.baseName
      })),
      parallel: config.parallel,
      parallelTotal: config.parallelTotal,
      parallelIndex: config.parallelIndex,
      metadata: params.metadata
    }
  });
  if (uploadBuildResponse.error) {
    throwAPIError(uploadBuildResponse.error);
  }
  return { build: uploadBuildResponse.data.build, screenshots };
}
// src/finalize.ts
import { createClient as createClient2, throwAPIError as throwAPIError2 } from "@argos-ci/api-client";
// Finalize all builds sharing a parallel nonce, signaling the API that
// every parallel shard has finished uploading. The nonce comes either from
// `params.parallel.nonce` or from the detected CI environment.
async function finalize(params) {
  const config = await readConfig({
    parallelNonce: params.parallel?.nonce ?? null
  });
  const authToken = getAuthToken(config);
  const apiClient = createClient2({
    baseUrl: config.apiBaseUrl,
    authToken
  });
  if (!config.parallelNonce) {
    throw new Error("parallel.nonce is required to finalize the build");
  }
  const finalizeBuildsResult = await apiClient.POST("/builds/finalize", {
    body: {
      parallelNonce: config.parallelNonce
    }
  });
  if (finalizeBuildsResult.error) {
    throwAPIError2(finalizeBuildsResult.error);
  }
  return finalizeBuildsResult.data;
}
// Public API of @argos-ci/core.
export {
  finalize,
  readConfig,
  upload2 as upload
};