benchmark: refactor helper into a class
This reverts commit 5b0308cd823a511098dadf9ddd5a35e3a9dbb424. PR-URL: https://github.com/nodejs/node/pull/31755 Reviewed-By: Anna Henningsen <anna@addaleax.net> Reviewed-By: Vladimir de Turckheim <vlad2t@hotmail.com> Reviewed-By: Rich Trott <rtrott@gmail.com>
This commit is contained in:
parent
9e69d97c60
commit
f64aafa2d5
@@ -3,223 +3,226 @@
|
||||
const child_process = require('child_process');
|
||||
const http_benchmarkers = require('./_http-benchmarkers.js');
|
||||
|
||||
exports.buildType = process.features.debug ? 'Debug' : 'Release';
|
||||
class Benchmark {
|
||||
constructor(fn, configs, options) {
|
||||
// Use the file name as the name of the benchmark
|
||||
this.name = require.main.filename.slice(__dirname.length + 1);
|
||||
// Parse job-specific configuration from the command line arguments
|
||||
const parsed_args = this._parseArgs(process.argv.slice(2), configs);
|
||||
this.options = parsed_args.cli;
|
||||
this.extra_options = parsed_args.extra;
|
||||
// The configuration list as a queue of jobs
|
||||
this.queue = this._queue(this.options);
|
||||
// The configuration of the current job, head of the queue
|
||||
this.config = this.queue[0];
|
||||
// Execution arguments i.e. flags used to run the jobs
|
||||
this.flags = [];
|
||||
if (options && options.flags) {
|
||||
this.flags = this.flags.concat(options.flags);
|
||||
}
|
||||
if (process.env.NODE_BENCHMARK_FLAGS) {
|
||||
const flags = process.env.NODE_BENCHMARK_FLAGS.split(/\s+/);
|
||||
this.flags = this.flags.concat(flags);
|
||||
}
|
||||
// Holds process.hrtime value
|
||||
this._time = [0, 0];
|
||||
// Used to make sure a benchmark only start a timer once
|
||||
this._started = false;
|
||||
this._ended = false;
|
||||
|
||||
exports.createBenchmark = function(fn, configs, options) {
|
||||
return new Benchmark(fn, configs, options);
|
||||
};
|
||||
|
||||
function Benchmark(fn, configs, options) {
|
||||
// Use the file name as the name of the benchmark
|
||||
this.name = require.main.filename.slice(__dirname.length + 1);
|
||||
// Parse job-specific configuration from the command line arguments
|
||||
const parsed_args = this._parseArgs(process.argv.slice(2), configs);
|
||||
this.options = parsed_args.cli;
|
||||
this.extra_options = parsed_args.extra;
|
||||
// The configuration list as a queue of jobs
|
||||
this.queue = this._queue(this.options);
|
||||
// The configuration of the current job, head of the queue
|
||||
this.config = this.queue[0];
|
||||
// Execution arguments i.e. flags used to run the jobs
|
||||
this.flags = [];
|
||||
if (options && options.flags) {
|
||||
this.flags = this.flags.concat(options.flags);
|
||||
// this._run will use fork() to create a new process for each configuration
|
||||
// combination.
|
||||
if (process.env.hasOwnProperty('NODE_RUN_BENCHMARK_FN')) {
|
||||
process.nextTick(() => fn(this.config));
|
||||
} else {
|
||||
process.nextTick(() => this._run());
|
||||
}
|
||||
}
|
||||
if (process.env.NODE_BENCHMARK_FLAGS) {
|
||||
const flags = process.env.NODE_BENCHMARK_FLAGS.split(/\s+/);
|
||||
this.flags = this.flags.concat(flags);
|
||||
}
|
||||
// Holds process.hrtime value
|
||||
this._time = [0, 0];
|
||||
// Used to make sure a benchmark only start a timer once
|
||||
this._started = false;
|
||||
this._ended = false;
|
||||
|
||||
// this._run will use fork() to create a new process for each configuration
|
||||
// combination.
|
||||
if (process.env.hasOwnProperty('NODE_RUN_BENCHMARK_FN')) {
|
||||
process.nextTick(() => fn(this.config));
|
||||
} else {
|
||||
process.nextTick(() => this._run());
|
||||
_parseArgs(argv, configs) {
|
||||
const cliOptions = {};
|
||||
const extraOptions = {};
|
||||
const validArgRE = /^(.+?)=([\s\S]*)$/;
|
||||
// Parse configuration arguments
|
||||
for (const arg of argv) {
|
||||
const match = arg.match(validArgRE);
|
||||
if (!match) {
|
||||
console.error(`bad argument: ${arg}`);
|
||||
process.exit(1);
|
||||
}
|
||||
const config = match[1];
|
||||
|
||||
if (configs[config]) {
|
||||
// Infer the type from the config object and parse accordingly
|
||||
const isNumber = typeof configs[config][0] === 'number';
|
||||
const value = isNumber ? +match[2] : match[2];
|
||||
if (!cliOptions[config])
|
||||
cliOptions[config] = [];
|
||||
cliOptions[config].push(value);
|
||||
} else {
|
||||
extraOptions[config] = match[2];
|
||||
}
|
||||
}
|
||||
return { cli: Object.assign({}, configs, cliOptions), extra: extraOptions };
|
||||
}
|
||||
|
||||
_queue(options) {
|
||||
const queue = [];
|
||||
const keys = Object.keys(options);
|
||||
|
||||
// Perform a depth-first walk though all options to generate a
|
||||
// configuration list that contains all combinations.
|
||||
function recursive(keyIndex, prevConfig) {
|
||||
const key = keys[keyIndex];
|
||||
const values = options[key];
|
||||
const type = typeof values[0];
|
||||
|
||||
for (const value of values) {
|
||||
if (typeof value !== 'number' && typeof value !== 'string') {
|
||||
throw new TypeError(
|
||||
`configuration "${key}" had type ${typeof value}`);
|
||||
}
|
||||
if (typeof value !== type) {
|
||||
// This is a requirement for being able to consistently and
|
||||
// predictably parse CLI provided configuration values.
|
||||
throw new TypeError(`configuration "${key}" has mixed types`);
|
||||
}
|
||||
|
||||
const currConfig = Object.assign({ [key]: value }, prevConfig);
|
||||
|
||||
if (keyIndex + 1 < keys.length) {
|
||||
recursive(keyIndex + 1, currConfig);
|
||||
} else {
|
||||
queue.push(currConfig);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (keys.length > 0) {
|
||||
recursive(0, {});
|
||||
} else {
|
||||
queue.push({});
|
||||
}
|
||||
|
||||
return queue;
|
||||
}
|
||||
|
||||
http(options, cb) {
|
||||
const self = this;
|
||||
const http_options = Object.assign({ }, options);
|
||||
http_options.benchmarker = http_options.benchmarker ||
|
||||
self.config.benchmarker ||
|
||||
self.extra_options.benchmarker ||
|
||||
exports.default_http_benchmarker;
|
||||
http_benchmarkers.run(
|
||||
http_options, (error, code, used_benchmarker, result, elapsed) => {
|
||||
if (cb) {
|
||||
cb(code);
|
||||
}
|
||||
if (error) {
|
||||
console.error(error);
|
||||
process.exit(code || 1);
|
||||
}
|
||||
self.config.benchmarker = used_benchmarker;
|
||||
self.report(result, elapsed);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
_run() {
|
||||
const self = this;
|
||||
// If forked, report to the parent.
|
||||
if (process.send) {
|
||||
process.send({
|
||||
type: 'config',
|
||||
name: this.name,
|
||||
queueLength: this.queue.length,
|
||||
});
|
||||
}
|
||||
|
||||
(function recursive(queueIndex) {
|
||||
const config = self.queue[queueIndex];
|
||||
|
||||
// Set NODE_RUN_BENCHMARK_FN to indicate that the child shouldn't
|
||||
// construct a configuration queue, but just execute the benchmark
|
||||
// function.
|
||||
const childEnv = Object.assign({}, process.env);
|
||||
childEnv.NODE_RUN_BENCHMARK_FN = '';
|
||||
|
||||
// Create configuration arguments
|
||||
const childArgs = [];
|
||||
for (const key of Object.keys(config)) {
|
||||
childArgs.push(`${key}=${config[key]}`);
|
||||
}
|
||||
for (const key of Object.keys(self.extra_options)) {
|
||||
childArgs.push(`${key}=${self.extra_options[key]}`);
|
||||
}
|
||||
|
||||
const child = child_process.fork(require.main.filename, childArgs, {
|
||||
env: childEnv,
|
||||
execArgv: self.flags.concat(process.execArgv),
|
||||
});
|
||||
child.on('message', sendResult);
|
||||
child.on('close', (code) => {
|
||||
if (code) {
|
||||
process.exit(code);
|
||||
}
|
||||
|
||||
if (queueIndex + 1 < self.queue.length) {
|
||||
recursive(queueIndex + 1);
|
||||
}
|
||||
});
|
||||
})(0);
|
||||
}
|
||||
|
||||
start() {
|
||||
if (this._started) {
|
||||
throw new Error('Called start more than once in a single benchmark');
|
||||
}
|
||||
this._started = true;
|
||||
this._time = process.hrtime();
|
||||
}
|
||||
|
||||
end(operations) {
|
||||
// Get elapsed time now and do error checking later for accuracy.
|
||||
const elapsed = process.hrtime(this._time);
|
||||
|
||||
if (!this._started) {
|
||||
throw new Error('called end without start');
|
||||
}
|
||||
if (this._ended) {
|
||||
throw new Error('called end multiple times');
|
||||
}
|
||||
if (typeof operations !== 'number') {
|
||||
throw new Error('called end() without specifying operation count');
|
||||
}
|
||||
if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED && operations <= 0) {
|
||||
throw new Error('called end() with operation count <= 0');
|
||||
}
|
||||
if (elapsed[0] === 0 && elapsed[1] === 0) {
|
||||
if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED)
|
||||
throw new Error('insufficient clock precision for short benchmark');
|
||||
// Avoid dividing by zero
|
||||
elapsed[1] = 1;
|
||||
}
|
||||
|
||||
this._ended = true;
|
||||
const time = elapsed[0] + elapsed[1] / 1e9;
|
||||
const rate = operations / time;
|
||||
this.report(rate, elapsed);
|
||||
}
|
||||
|
||||
report(rate, elapsed) {
|
||||
sendResult({
|
||||
name: this.name,
|
||||
conf: this.config,
|
||||
rate: rate,
|
||||
time: elapsed[0] + elapsed[1] / 1e9,
|
||||
type: 'report',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Benchmark.prototype._parseArgs = function(argv, configs) {
  // Partition "key=value" CLI tokens into recognized configuration
  // values (keys of `configs`) and free-form extra options.
  const recognized = {};
  const extras = {};
  const pattern = /^(.+?)=([\s\S]*)$/;

  for (const token of argv) {
    const parts = pattern.exec(token);
    if (parts === null) {
      console.error(`bad argument: ${token}`);
      process.exit(1);
    }
    const [, key, rawValue] = parts;

    if (configs[key]) {
      // The sample value in `configs` decides how the input parses:
      // numeric configs get numeric CLI values.
      const asNumber = typeof configs[key][0] === 'number';
      if (recognized[key] === undefined)
        recognized[key] = [];
      recognized[key].push(asNumber ? +rawValue : rawValue);
    } else {
      extras[key] = rawValue;
    }
  }

  // CLI-provided values override the defaults from `configs`.
  return { cli: Object.assign({}, configs, recognized), extra: extras };
};
|
||||
|
||||
Benchmark.prototype._queue = function(options) {
  // Expand every combination of configuration values into a flat job
  // list via a depth-first walk over the option keys.
  const keys = Object.keys(options);
  const jobs = [];

  function descend(depth, partial) {
    const key = keys[depth];
    const values = options[key];
    const expectedType = typeof values[0];

    for (const value of values) {
      if (typeof value !== 'number' && typeof value !== 'string') {
        throw new TypeError(`configuration "${key}" had type ${typeof value}`);
      }
      if (typeof value !== expectedType) {
        // Mixed types would make CLI value parsing ambiguous, so
        // reject them up front.
        throw new TypeError(`configuration "${key}" has mixed types`);
      }

      const combined = Object.assign({ [key]: value }, partial);

      if (depth + 1 === keys.length) {
        jobs.push(combined);
      } else {
        descend(depth + 1, combined);
      }
    }
  }

  if (keys.length === 0) {
    // No options at all still yields a single (empty) job.
    jobs.push({});
  } else {
    descend(0, {});
  }

  return jobs;
};
|
||||
|
||||
// Benchmark an http server.
|
||||
exports.default_http_benchmarker =
|
||||
http_benchmarkers.default_http_benchmarker;
|
||||
exports.PORT = http_benchmarkers.PORT;
|
||||
|
||||
Benchmark.prototype.http = function(options, cb) {
  // Run an HTTP benchmarker against this benchmark, then report the
  // measured rate. The benchmarker is picked from, in order: the call
  // options, the job config, the extra CLI options, or the default.
  const http_options = Object.assign({ }, options);
  http_options.benchmarker =
    http_options.benchmarker ||
    this.config.benchmarker ||
    this.extra_options.benchmarker ||
    exports.default_http_benchmarker;

  http_benchmarkers.run(
    http_options,
    (error, code, used_benchmarker, result, elapsed) => {
      if (cb)
        cb(code);
      if (error) {
        console.error(error);
        process.exit(code || 1);
      }
      // Record which benchmarker actually ran so the report names it.
      this.config.benchmarker = used_benchmarker;
      this.report(result, elapsed);
    });
};
|
||||
|
||||
Benchmark.prototype._run = function() {
  // If forked, tell the monitoring parent how many jobs will run.
  if (process.send) {
    process.send({
      type: 'config',
      name: this.name,
      queueLength: this.queue.length,
    });
  }

  // Fork one child per configuration combination, sequentially.
  const launch = (queueIndex) => {
    const config = this.queue[queueIndex];

    // NODE_RUN_BENCHMARK_FN makes the child execute the benchmark
    // function directly instead of building its own queue.
    const childEnv = Object.assign({}, process.env);
    childEnv.NODE_RUN_BENCHMARK_FN = '';

    // Serialize this job's configuration back into key=value args.
    const childArgs = [];
    for (const key of Object.keys(config))
      childArgs.push(`${key}=${config[key]}`);
    for (const key of Object.keys(this.extra_options))
      childArgs.push(`${key}=${this.extra_options[key]}`);

    const child = child_process.fork(require.main.filename, childArgs, {
      env: childEnv,
      execArgv: this.flags.concat(process.execArgv),
    });
    child.on('message', sendResult);
    child.on('close', (code) => {
      if (code)
        process.exit(code);

      if (queueIndex + 1 < this.queue.length)
        launch(queueIndex + 1);
    });
  };

  launch(0);
};
|
||||
|
||||
Benchmark.prototype.start = function() {
  // Guard against accidentally starting the timer twice.
  if (this._started)
    throw new Error('Called start more than once in a single benchmark');

  this._started = true;
  this._time = process.hrtime();
};
|
||||
|
||||
Benchmark.prototype.end = function(operations) {
  // Capture the elapsed time first so the validation below does not
  // inflate the measurement.
  const elapsed = process.hrtime(this._time);

  if (!this._started)
    throw new Error('called end without start');
  if (this._ended)
    throw new Error('called end multiple times');
  if (typeof operations !== 'number')
    throw new Error('called end() without specifying operation count');
  if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED && operations <= 0)
    throw new Error('called end() with operation count <= 0');

  if (elapsed[0] === 0 && elapsed[1] === 0) {
    if (!process.env.NODEJS_BENCHMARK_ZERO_ALLOWED)
      throw new Error('insufficient clock precision for short benchmark');
    // Avoid dividing by zero
    elapsed[1] = 1;
  }

  this._ended = true;
  const seconds = elapsed[0] + elapsed[1] / 1e9;
  this.report(operations / seconds, elapsed);
};
|
||||
|
||||
function formatResult(data) {
|
||||
// Construct configuration string, " A=a, B=b, ..."
|
||||
let conf = '';
|
||||
@@ -242,27 +245,6 @@ function sendResult(data) {
|
||||
console.log(formatResult(data));
|
||||
}
|
||||
}
|
||||
exports.sendResult = sendResult;
|
||||
|
||||
Benchmark.prototype.report = function(rate, elapsed) {
  // Forward one result record to the parent process (or print it).
  sendResult({
    name: this.name,
    conf: this.config,
    rate,
    time: elapsed[0] + elapsed[1] / 1e9,
    type: 'report',
  });
};
|
||||
|
||||
exports.binding = function(bindingName) {
|
||||
try {
|
||||
const { internalBinding } = require('internal/test/binding');
|
||||
|
||||
return internalBinding(bindingName);
|
||||
} catch {
|
||||
return process.binding(bindingName);
|
||||
}
|
||||
};
|
||||
|
||||
const urls = {
|
||||
long: 'http://nodejs.org:89/docs/latest/api/foo/bar/qua/13949281/0f28b/' +
|
||||
@@ -278,7 +260,6 @@ const urls = {
|
||||
percent: 'https://%E4%BD%A0/foo',
|
||||
dot: 'https://example.org/./a/../b/./c',
|
||||
};
|
||||
exports.urls = urls;
|
||||
|
||||
const searchParams = {
|
||||
noencode: 'foo=bar&baz=quux&xyzzy=thud',
|
||||
@@ -293,7 +274,6 @@ const searchParams = {
|
||||
manyblankpairs: '&&&&&&&&&&&&&&&&&&&&&&&&',
|
||||
altspaces: 'foo+bar=baz+quux&xyzzy+thud=quuy+quuz&abc=def+ghi',
|
||||
};
|
||||
exports.searchParams = searchParams;
|
||||
|
||||
function getUrlData(withBase) {
|
||||
const data = require('../test/fixtures/wpt/url/resources/urltestdata.json');
|
||||
@@ -309,8 +289,6 @@ function getUrlData(withBase) {
|
||||
return result;
|
||||
}
|
||||
|
||||
exports.urlDataTypes = Object.keys(urls).concat(['wpt']);
|
||||
|
||||
/**
|
||||
* Generate an array of data for URL benchmarks to use.
|
||||
* The size of the resulting data set is the original data size * 2 ** `e`.
|
||||
@@ -354,4 +332,27 @@ function bakeUrlData(type, e = 0, withBase = false, asUrl = false) {
|
||||
}
|
||||
return result;
|
||||
}
|
||||
exports.bakeUrlData = bakeUrlData;
|
||||
|
||||
module.exports = {
|
||||
PORT: http_benchmarkers.PORT,
|
||||
bakeUrlData,
|
||||
binding(bindingName) {
|
||||
try {
|
||||
const { internalBinding } = require('internal/test/binding');
|
||||
|
||||
return internalBinding(bindingName);
|
||||
} catch {
|
||||
return process.binding(bindingName);
|
||||
}
|
||||
},
|
||||
buildType: process.features.debug ? 'Debug' : 'Release',
|
||||
createBenchmark(fn, configs, options) {
|
||||
return new Benchmark(fn, configs, options);
|
||||
},
|
||||
// Benchmark an http server.
|
||||
default_http_benchmarker: http_benchmarkers.default_http_benchmarker,
|
||||
sendResult,
|
||||
searchParams,
|
||||
urlDataTypes: Object.keys(urls).concat(['wpt']),
|
||||
urls,
|
||||
};
|
||||
|
Loading…
x
Reference in New Issue
Block a user