benchmark-meter

benchmark-meter is a straightforward benchmarking tool for measuring the performance of algorithms.
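
A minimal usage sketch inferred from the Benchmark class below. It assumes the package index re-exports Benchmark, that the options object accepts a repeat count (suggested by the ConfigHandler usage and the repeat fallback in add()), and that the DataResult returned by run() wraps entries of the shape { name, average, fastest, slowest }; the exact DataResult accessor API is not shown in this file.

const { Benchmark } = require('benchmark-meter');

const benchmark = new Benchmark({ repeat: 10 }); // `repeat` option is an assumption

benchmark.add('array-sort', () => {
    Array.from({ length: 1000 }, () => Math.random()).sort((a, b) => a - b);
});

benchmark.run().then((result) => {
    // `result` is a DataResult; how its entries are read out is an assumption.
    console.log(result);
});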

"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.Benchmark = void 0; const DataResult_1 = require("./DataResult"); const node_perf_hooks_1 = require("node:perf_hooks"); const exceptions_1 = require("./exceptions"); const ConfigHandler_1 = require("./config/ConfigHandler"); /** * Represents a benchmarking utility for measuring the performance of algorithms. * @since 1.0.0 */ class Benchmark { /** * Creates an instance of Benchmark. * * @param {IOptions} options - The options for configuring the benchmark. */ constructor(options = {}) { this.algorithms = []; this.results = []; this.options = ConfigHandler_1.ConfigHandler.parse(options); } /** * Adds a algorithm to the Benchmark. * * @param {string} name - The name of the algorithm. * @param {IFunction} fn - The callback function with the algorithm to be benchmarked. * @param {number | undefined} repeat - The number of times to repeat the algorithm (optional). * @throws Will throw an error if the algorithm name is already used or if the repeat count is not greater than 0. * @since 1.0.0 */ add(name, fn, repeat) { if (typeof name !== 'string') { throw new TypeError(`name must be a string. Received: ${typeof name}`); } if (this.isNameAlreadyUsed(name)) { throw new exceptions_1.DuplicateNameException(name); } if (typeof fn !== 'function') { throw new TypeError(`fn must be a function. Received: ${typeof fn}`); } ConfigHandler_1.ConfigHandler.validateType({ repeat }); ConfigHandler_1.ConfigHandler.validateValue({ repeat }); this.algorithms.push({ name, fn, repeat: repeat ?? this.options.repeat, }); } /** * Runs all added algorithms and returns the results as a DataResult instance. * * @returns {Promise<DataResult>} A promise that resolves to a DataResult instance. * @throws Will throw an error if no algorithms have been added. * @since 1.0.0 */ async run() { if (this.algorithms.length === 0) { throw new exceptions_1.NoAlgorithmsAddedException(); } for (const { name, fn, repeat } of this.algorithms) { await this.executeNTimes(name, fn, repeat); } return new DataResult_1.DataResult(this.results); } /** * Clears the results array. * @since 1.0.0 */ clearResults() { this.results = []; } /** * Clears the algorithm array. * @since 1.0.0 */ clearAlgorithms() { this.algorithms = []; } async executeNTimes(name, fn, repeat) { const durations = []; for (let i = 0; i < repeat; i += 1) { node_perf_hooks_1.performance.mark('benchmark-start'); await fn(); node_perf_hooks_1.performance.mark('benchmark-end'); const measure = node_perf_hooks_1.performance.measure(name, 'benchmark-start', 'benchmark-end'); durations.push(measure.duration); } this.calculateResults(durations, name, repeat); } calculateResults(durations, name, repeat) { const totalDuration = durations.reduce((acc, duration) => acc + duration, 0); const averageDuration = totalDuration / repeat; const sortedDurations = durations.sort((a, b) => a - b); this.results.push({ name, average: `${averageDuration.toFixed(2)}ms`, fastest: `${sortedDurations[0].toFixed(2)}ms`, slowest: `${sortedDurations[sortedDurations.length - 1].toFixed(2)}ms` }); } isNameAlreadyUsed(name) { return this.algorithms.some((algorithm) => algorithm.name === name); } } exports.Benchmark = Benchmark;