UNPKG

@dapplion/benchmark

Version:

Ensures, via CI, that new code does not introduce performance regressions. Tracks:

163 lines (162 loc) 5.11 kB
import path from "node:path"; import S3 from "aws-sdk/clients/s3.js"; import { fromCsv, toCsv, extendError } from "../utils/index.js"; import { HistoryProviderType } from "./provider.js"; const historyDir = "history"; const latestDir = "latest"; const MAX_ITEMS_TO_LIST = 100; export class S3HistoryProvider { config; type = HistoryProviderType.S3; s3; constructor(config) { this.config = config; this.s3 = new S3(config); } /** * Automatically loads credentials from ENV * * Custom ENVs * * S3_ACCESS_KEY * S3_SECRET_KEY * S3_REGION (optional) * S3_ENDPOINT (optional) * * AWS ENVs * * AWS_ACCESS_KEY_ID * AWS_SECRET_ACCESS_KEY * AWS_SESSION_TOKEN (optional) * * https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/loading-node-credentials-environment.html */ static fromEnv() { const { S3_ACCESS_KEY, S3_SECRET_KEY, S3_REGION, S3_BUCKET, S3_ENDPOINT, S3_KEY_PREFIX } = process.env; if (!S3_BUCKET) throw Error("No ENV S3_BUCKET"); if (!S3_KEY_PREFIX) throw Error("No ENV S3_KEY_PREFIX"); // S3_ACCESS_KEY is optional // S3_SECRET_KEY is optional // S3_REGION is optional // S3_ENDPOINT is optional return new S3HistoryProvider({ accessKeyId: ifSet(S3_ACCESS_KEY), secretAccessKey: ifSet(S3_SECRET_KEY), region: ifSet(S3_REGION), Bucket: S3_BUCKET, endpoint: ifSet(S3_ENDPOINT), keyPrefix: S3_KEY_PREFIX, }); } providerInfo() { return `S3HistoryProvider, Bucket ${this.config.Bucket}`; } async readLatestInBranch(branch) { const key = this.getLatestInBranchKey(branch); return await this.readBenchFileIfExists(key); } async writeLatestInBranch(branch, benchmark) { const key = this.getLatestInBranchKey(branch); await this.writeBenchFile(key, benchmark); } async readHistory(maxItems = MAX_ITEMS_TO_LIST) { const objects = await this.s3 .listObjects({ Prefix: this.getHistoryDir(), Bucket: this.config.Bucket, MaxKeys: maxItems, }) .promise() .catch((e) => { throw extendError(e, "Error on listObjects"); }); if (!objects.Contents) { throw Error("s3 response.Contents is falsy"); } 
const keys = []; for (const obj of objects.Contents) { if (obj.Key) { keys.push(obj.Key); } } return await Promise.all(keys.map(async (key) => this.readBenchFile(key))); } async readHistoryCommit(commitSha) { const key = this.getHistoryCommitKey(commitSha); return await this.readBenchFileIfExists(key); } async writeToHistory(benchmark) { const key = this.getHistoryCommitKey(benchmark.commitSha); await this.writeBenchFile(key, benchmark); } async readBenchFileIfExists(key) { try { return await this.readBenchFile(key); } catch (e) { // Found with trial an error // NoSuchKey: Error on getObject latest/main: null if (e.code === "NoSuchKey") { return null; } else { throw e; } } } async readBenchFile(key) { const res = await this.s3 .getObject({ Bucket: this.config.Bucket, Key: key, }) .promise() .catch((e) => { throw extendError(e, `Error on getObject ${key}`); }); if (!res.Body) { throw Error("s3 response.Body is falsy"); } let str; if (typeof res.Body === "string") { str = res.Body; } else { const buff = Buffer.from(res.Body); str = buff.toString("utf8"); } const { data, metadata } = fromCsv(str); const csvMeta = metadata; return { commitSha: csvMeta.commit, results: data }; } /** Write result to CSV + metadata as Embedded Metadata */ async writeBenchFile(key, benchmark) { const csvMeta = { commit: benchmark.commitSha }; const str = toCsv(benchmark.results, csvMeta); await this.s3 .upload({ Bucket: this.config.Bucket, Body: str, Key: key, }) .promise() .catch((e) => { throw extendError(e, `Error on upload ${key}`); }); } getLatestInBranchKey(branch) { return path.join(this.config.keyPrefix, latestDir, branch); } getHistoryCommitKey(commitSha) { return path.join(this.getHistoryDir(), commitSha); } getHistoryDir() { return path.join(this.config.keyPrefix, historyDir); } } /** Prevent returning empty strings to JS app layer */ function ifSet(str) { return str ? str : undefined; }