@best/runner-abstract
Best Runner (Abstract)
/*
* Copyright (c) 2019, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: MIT
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/MIT
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const fs_1 = __importDefault(require("fs"));
const path_1 = require("path");
const express_1 = __importDefault(require("express"));
const utils_1 = require("@best/utils");
const cross_origin_isolation_1 = require("./cross-origin-isolation");
class AbstractRunner {
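// Concrete runners must override this with the browser specs they can drive.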
static async getBrowserSpecs() {
throw new Error('Runner must implement getBrowserSpecs');
}
static isRemote = false;
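/*
* Resolve the URL the benchmark entry will be loaded from. When `useHttp` is
* disabled the entry is served straight from disk over a `file://` URL with a
* no-op terminate. Otherwise an Express server is started on an OS-assigned
* port with the cross-origin isolation middleware applied, serving the entry's
* directory and any configured asset directories (mounted under `/alias` when
* an alias is given).
*/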
initializeServer(benchmarkEntry, projectConfig) {
const { assets, useHttp } = projectConfig;
if (!useHttp) {
return Promise.resolve({ url: `file://${benchmarkEntry}`, terminate: () => { } });
}
return new Promise((resolve) => {
const app = (0, express_1.default)();
app.use((0, cross_origin_isolation_1.crossOriginIsolation)());
app.use(express_1.default.static((0, path_1.dirname)(benchmarkEntry)));
if (Array.isArray(assets)) {
for (const { path: assetDir, alias } of assets) {
if (!assetDir || !fs_1.default.existsSync(assetDir)) {
throw new Error(`Invalid asset path: '${assetDir}'`);
}
if (alias) {
app.use(`/${alias}`, express_1.default.static(assetDir));
}
else {
app.use(express_1.default.static(assetDir));
}
}
}
const server = app.listen(() => {
const { port } = server.address();
resolve({
url: `http://127.0.0.1:${port}/${(0, path_1.basename)(benchmarkEntry)}`,
terminate: () => {
server.close();
},
});
});
});
}
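/*
* Map the project configuration onto the options consumed by the benchmark
* runtime. A fixed `benchmarkIterations` count becomes the minimum sample
* count and makes the max duration irrelevant; otherwise the configured
* duration and minimum-iteration limits apply.
*/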
getRuntimeOptions(projectConfig) {
const { benchmarkIterations, benchmarkOnClient, benchmarkMaxDuration, benchmarkMinIterations } = projectConfig;
const definedIterations = Number.isInteger(benchmarkIterations);
let useMacroTaskAfterBenchmark = true;
/*
* If users only want to measure the 'script' performance
* (i.e. they specified `metrics: ['script']` in their
* `best.config.js` file), speed up the tests by making
* Best avoid adding the extra macrotask (intended for
* measuring style/layout) to the benchmark iterations.
*/
if (projectConfig.metrics.length === 1 && projectConfig.metrics.includes('script')) {
useMacroTaskAfterBenchmark = false;
}
// When a fixed iteration count is defined, the max duration is irrelevant
const maxDuration = definedIterations ? 1 : benchmarkMaxDuration;
const minSampleCount = definedIterations ? benchmarkIterations : benchmarkMinIterations;
return {
maxDuration,
minSampleCount,
iterations: benchmarkIterations,
iterateOnClient: benchmarkOnClient,
useMacroTaskAfterBenchmark,
};
}
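/*
* Build the environment metadata stored with the results: host hardware and
* load, the browser in use, the relevant project settings, and the git state
* (commit, branch, repository, local changes) from the global config.
*/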
async getEnvironment(browser, projectConfig, globalConfig) {
const { system, cpu, os, load } = await (0, utils_1.getSystemInfo)();
const { benchmarkOnClient, benchmarkRunner, benchmarkEnvironment, benchmarkIterations, projectName } = projectConfig;
return {
hardware: { system, cpu, os },
container: { load },
browser,
configuration: {
project: {
projectName,
benchmarkOnClient,
benchmarkRunner,
benchmarkEnvironment,
benchmarkIterations,
},
global: {
gitCommitHash: globalConfig.gitInfo.lastCommit.hash,
gitHasLocalChanges: globalConfig.gitInfo.localChanges,
gitBranch: globalConfig.gitInfo.branch,
gitRepository: globalConfig.gitInfo.repo,
},
},
};
}
}
exports.default = AbstractRunner;
//# sourceMappingURL=index.js.map
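
For illustration only, a concrete runner might build on these helpers roughly as
sketched below. The class name `ExampleRunner`, the shape of `run()`, the browser
spec values, and the `driveBrowser()` helper are assumptions made for this example,
not APIs exported by @best/runner-abstract.

const AbstractRunner = require('@best/runner-abstract').default;

// Hypothetical stand-in for real browser automation; a real runner would use
// something like a Playwright/Puppeteer driver here to load `url` and collect samples.
async function driveBrowser(url, runtimeOptions) {
    return []; // no samples collected in this sketch
}

class ExampleRunner extends AbstractRunner {
    static async getBrowserSpecs() {
        // Browsers this runner knows how to drive (illustrative values).
        return [{ name: 'example.headless', version: '1' }];
    }
    async run({ benchmarkEntry }, projectConfig, globalConfig) {
        // Serve the benchmark from disk or a local HTTP server (see initializeServer above).
        const { url, terminate } = await this.initializeServer(benchmarkEntry, projectConfig);
        try {
            const runtimeOptions = this.getRuntimeOptions(projectConfig);
            const results = await driveBrowser(url, runtimeOptions);
            const environment = await this.getEnvironment({ name: 'example.headless', version: '1' }, projectConfig, globalConfig);
            return { results, environment };
        } finally {
            terminate();
        }
    }
}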