// speedo: Simple performance testing tool using SauceLabs
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.handler = exports.builder = exports.desc = exports.command = void 0;
var _ora = _interopRequireDefault(require("ora"));
var _yargs = _interopRequireDefault(require("yargs"));
var _saucelabs = _interopRequireDefault(require("saucelabs"));
var _utils = require("../utils");
var _constants = require("../constants");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
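// yargs definition of the "analyze" sub-command (positional jobName plus optional params)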
const command = 'analyze [params...] <jobName>';
exports.command = command;
const desc = 'Analyze results of prerun performance tests.';
exports.desc = desc;
const builder = _constants.ANALYZE_CLI_PARAMS;
exports.builder = builder;
const handler = async argv => {
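// resolve Sauce Labs credentials from CLI params or environment variables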
const username = argv.user || process.env.SAUCE_USERNAME;
const accessKey = argv.key || process.env.SAUCE_ACCESS_KEY;
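// load optional config; if it defines a performance budget, check against
// the budget metrics, otherwise use the metric params passed on the CLI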
const config = (0, _utils.getConfig)(argv);
const budget = config ? config.budget : null;
const metrics = budget ? (0, _utils.getBudgetMetrics)(budget) : (0, _utils.getMetricParams)(argv, budget);
/**
* check if username and access key are available
*/
if (!username || !accessKey) {
_yargs.default.showHelp(); // eslint-disable-next-line no-console
console.error(_constants.ERROR_MISSING_CREDENTIALS);
return process.exit(1);
}
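// create a Sauce Labs API client for the given region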
const user = new _saucelabs.default({
user: username,
key: accessKey,
region: argv.region
});
const status = (0, _ora.default)(`Fetch job "${argv.jobName}" from ${username}`).start();
/**
* fetch job
*/
let job;
try {
const { jobs } = await (0, _utils.waitFor)(
() => user.listJobs(username, { name: argv.jobName, limit: 1 }),
/* istanbul ignore next */
({ jobs }) => jobs && jobs.length > 0,
'Couldn\'t find job in database');
job = jobs.pop();
/**
* error out if job didn't complete
*/
if (job.error) {
throw new Error('job failed or didn\'t complete');
}
status.succeed();
} catch (e) {
status.fail(`Couldn't fetch job with name "${argv.jobName}": ${e.stack}`);
return process.exit(1);
}
job.name = argv.jobName;
status.start('Analyze performance of job');
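/**
 * report skeleton: job id, name, link to the job and per-page results
 */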
const jobResult = {
id: job.id,
name: job.name,
url: (0, _utils.getJobUrl)(argv, job.id),
results: []
};
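/**
 * poll the API until performance metrics for the job are available
 */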
try {
const performanceMetrics = await (0, _utils.waitFor)(
() => user.getPerformanceMetricsByJobId(job.id),
/* istanbul ignore next */
performanceMetrics => performanceMetrics.items.length !== 0);
/**
* filter by page url if given
*/
if (argv.pageUrl) {
performanceMetrics.items = performanceMetrics.items.filter(perfMetric => perfMetric.page_url === argv.pageUrl);
}
/**
* filter by order index if given
*/
if (typeof argv.orderIndex === 'number') {
performanceMetrics.items = performanceMetrics.items.filter(perfMetric => perfMetric.order_index === argv.orderIndex);
}
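/**
 * compare each page load against its baseline history (or the configured budget)
 */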
for (const pageLoadMetric of performanceMetrics.items) {
const results = {};
const baselineHistory = budget ? (0, _utils.prepareBudgetData)(budget) : await user.getBaselineHistory(job.id, {
metricNames: _constants.PERFORMANCE_METRICS,
orderIndex: pageLoadMetric.order_index
});
for (const [metricName, baseline] of Object.entries(baselineHistory)) {
const capturedValue = pageLoadMetric.metric_data[metricName];
const result = {
metric: metricName,
passed: true,
value: capturedValue || 0,
baseline: baseline[0]
};
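// a metric under test fails if its captured value lies outside the baseline's lower (l) / upper (u) bounds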
if (metrics.includes(metricName) && (baseline[0].u < capturedValue || baseline[0].l > capturedValue)) {
result.passed = false;
}
results[metricName] = result;
}
jobResult.results.push({
orderIndex: pageLoadMetric.order_index,
url: pageLoadMetric.page_url,
// pass only if no failing metrics
passed: Object.values(results).every(r => r.passed),
metrics: results
});
}
// pass only if no failing results
jobResult.passed = jobResult.results.every(r => r.passed);
status.succeed();
} catch (e) {
status.fail(`Couldn't fetch performance results: ${e.stack}`);
return process.exit(1);
}
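// print the analysis report and exit non-zero if any metric check failed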
(0, _utils.analyzeReport)(jobResult, metrics);
process.exit(jobResult.passed ? 0 : 1);
};
exports.handler = handler;
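
/**
 * example invocation (a sketch: flag names assumed from the params read above,
 * e.g. argv.region and argv.orderIndex; credentials may also be supplied via
 * SAUCE_USERNAME / SAUCE_ACCESS_KEY):
 *
 *   speedo analyze "my performance test" --region us --orderIndex 0
 */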