Split off SuiteRunner class (#433)
Move Suite-related code to a separate SuiteRunner class.
This prepares for async and remote suites, where it will be cleaner to handle those code paths independently rather than add more functionality to the BenchmarkRunner class.
camillobruni authored Oct 21, 2024
1 parent bb4a4ad commit 2387b9f
Showing 2 changed files with 107 additions and 82 deletions.
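In essence, BenchmarkRunner.runSuite becomes a thin wrapper that hands per-suite work to the new SuiteRunner class, while iteration-level aggregation (_finalize, _appendIterationMetrics) stays in BenchmarkRunner. Below is a condensed sketch of the new shape, paraphrasing the diff that follows; constructor arguments and method names are taken from the diff, and method bodies are abbreviated.

export class BenchmarkRunner {
    async runSuite(suite) {
        // Shared state is still passed in explicitly; the FIXMEs in the diff
        // note that more of it should eventually be encapsulated in SuiteRunner.
        const suiteRunner = new SuiteRunner(this._measuredValues, this._frame, this._page, this._client, suite);
        await suiteRunner.run();
    }
    // _finalize() and _appendIterationMetrics() remain here unchanged.
}

export class SuiteRunner {
    constructor(measuredValues, frame, page, client, suite) {
        this._measuredValues = measuredValues;
        this._frame = frame;
        this._page = page;
        this._client = client;
        this._suite = suite;
    }

    async run() {
        await this._prepareSuite(this._suite);
        await this._runSuite(this._suite);
    }
    // _prepareSuite(), _loadFrame(), _runSuite(), _runTestAndRecordResults(), ...
    // move over from BenchmarkRunner largely unchanged.
}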
165 changes: 93 additions & 72 deletions resources/benchmark-runner.mjs
@@ -454,8 +454,99 @@ export class BenchmarkRunner {
}

async runSuite(suite) {
await this._prepareSuite(suite);
await this._runSuite(suite);
// FIXME: Encapsulate more state in the SuiteRunner.
// FIXME: Return and use measured values from SuiteRunner.
const suiteRunner = new SuiteRunner(this._measuredValues, this._frame, this._page, this._client, suite);
await suiteRunner.run();
}

async _finalize() {
this._appendIterationMetrics();
if (this._client?.didRunSuites) {
let product = 1;
const values = [];
for (const suiteName in this._measuredValues.tests) {
const suiteTotal = this._measuredValues.tests[suiteName].total;
product *= suiteTotal;
values.push(suiteTotal);
}

values.sort((a, b) => a - b); // Avoid the loss of significance for the sum.
const total = values.reduce((a, b) => a + b);
const geomean = Math.pow(product, 1 / values.length);

this._measuredValues.total = total;
this._measuredValues.mean = total / values.length;
this._measuredValues.geomean = geomean;
this._measuredValues.score = geomeanToScore(geomean);
await this._client.didRunSuites(this._measuredValues);
}
}

_appendIterationMetrics() {
const getMetric = (name, unit = "ms") => this._metrics[name] || (this._metrics[name] = new Metric(name, unit));
const iterationTotalMetric = (i) => {
if (i >= params.iterationCount)
throw new Error(`Requested iteration=${i} does not exist.`);
return getMetric(`Iteration-${i}-Total`);
};

const collectSubMetrics = (prefix, items, parent) => {
for (let name in items) {
const results = items[name];
const metric = getMetric(prefix + name);
metric.add(results.total ?? results);
if (metric.parent !== parent)
parent.addChild(metric);
if (results.tests)
collectSubMetrics(`${metric.name}${Metric.separator}`, results.tests, metric);
}
};
const initializeMetrics = this._metrics === null;
if (initializeMetrics)
this._metrics = { __proto__: null };

const iterationResults = this._measuredValues.tests;
collectSubMetrics("", iterationResults);

if (initializeMetrics) {
// Prepare all iteration metrics so they are listed at the end
// of the _metrics object, before "Total" and "Score".
for (let i = 0; i < this._iterationCount; i++)
iterationTotalMetric(i).description = `Test totals for iteration ${i}`;
getMetric("Geomean", "ms").description = "Geomean of test totals";
getMetric("Score", "score").description = "Scaled inverse of the Geomean";
}

const geomean = getMetric("Geomean");
const iterationTotal = iterationTotalMetric(geomean.length);
for (const results of Object.values(iterationResults))
iterationTotal.add(results.total);
iterationTotal.computeAggregatedMetrics();
geomean.add(iterationTotal.geomean);
getMetric("Score").add(geomeanToScore(iterationTotal.geomean));

for (const metric of Object.values(this._metrics))
metric.computeAggregatedMetrics();
}
}

// FIXME: Create AsyncSuiteRunner subclass.
// FIXME: Create RemoteSuiteRunner subclass.
export class SuiteRunner {
constructor(measuredValues, frame, page, client, suite) {
// FIXME: Create SuiteRunner-local measuredValues.
this._measuredValues = measuredValues;
this._frame = frame;
this._page = page;
this._client = client;
this._suite = suite;
}

async run() {
// FIXME: use this._suite in all SuiteRunner methods directly.
await this._prepareSuite(this._suite);
await this._runSuite(this._suite);
}

async _prepareSuite(suite) {
@@ -570,74 +661,4 @@ export class BenchmarkRunner {
if (this._client?.didRunTest)
await this._client.didRunTest(suite, test);
}

async _finalize() {
this._appendIterationMetrics();
if (this._client?.didRunSuites) {
let product = 1;
const values = [];
for (const suiteName in this._measuredValues.tests) {
const suiteTotal = this._measuredValues.tests[suiteName].total;
product *= suiteTotal;
values.push(suiteTotal);
}

values.sort((a, b) => a - b); // Avoid the loss of significance for the sum.
const total = values.reduce((a, b) => a + b);
const geomean = Math.pow(product, 1 / values.length);

this._measuredValues.total = total;
this._measuredValues.mean = total / values.length;
this._measuredValues.geomean = geomean;
this._measuredValues.score = geomeanToScore(geomean);
await this._client.didRunSuites(this._measuredValues);
}
}

_appendIterationMetrics() {
const getMetric = (name, unit = "ms") => this._metrics[name] || (this._metrics[name] = new Metric(name, unit));
const iterationTotalMetric = (i) => {
if (i >= params.iterationCount)
throw new Error(`Requested iteration=${i} does not exist.`);
return getMetric(`Iteration-${i}-Total`);
};

const collectSubMetrics = (prefix, items, parent) => {
for (let name in items) {
const results = items[name];
const metric = getMetric(prefix + name);
metric.add(results.total ?? results);
if (metric.parent !== parent)
parent.addChild(metric);
if (results.tests)
collectSubMetrics(`${metric.name}${Metric.separator}`, results.tests, metric);
}
};
const initializeMetrics = this._metrics === null;
if (initializeMetrics)
this._metrics = { __proto__: null };

const iterationResults = this._measuredValues.tests;
collectSubMetrics("", iterationResults);

if (initializeMetrics) {
// Prepare all iteration metrics so they are listed at the end
// of the _metrics object, before "Total" and "Score".
for (let i = 0; i < this._iterationCount; i++)
iterationTotalMetric(i).description = `Test totals for iteration ${i}`;
getMetric("Geomean", "ms").description = "Geomean of test totals";
getMetric("Score", "score").description = "Scaled inverse of the Geomean";
}

const geomean = getMetric("Geomean");
const iterationTotal = iterationTotalMetric(geomean.length);
for (const results of Object.values(iterationResults))
iterationTotal.add(results.total);
iterationTotal.computeAggregatedMetrics();
geomean.add(iterationTotal.geomean);
getMetric("Score").add(geomeanToScore(iterationTotal.geomean));

for (const metric of Object.values(this._metrics))
metric.computeAggregatedMetrics();
}
}
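The FIXME comments in the new code only name the planned AsyncSuiteRunner and RemoteSuiteRunner subclasses; this commit does not implement them. Purely as a hypothetical illustration of why the split helps (per the commit message, async and remote suites can then get their own code paths), a subclass would only need to override run() while leaving BenchmarkRunner untouched. The helper methods named below are invented for the sketch and do not exist in the repository.

// Hypothetical sketch only — not part of this commit.
class AsyncSuiteRunner extends SuiteRunner {
    async run() {
        await this._prepareSuite(this._suite);
        // An async suite might await suite-defined asynchronous steps instead
        // of the synchronous test loop in the base class.
        await this._runSuiteAsync(this._suite); // invented helper, for illustration
    }
}

class RemoteSuiteRunner extends SuiteRunner {
    async run() {
        // A remote suite could hand execution to another frame or context and
        // only collect the measured values locally.
        await this._runSuiteRemotely(this._suite); // invented helper, for illustration
    }
}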
24 changes: 14 additions & 10 deletions tests/benchmark-runner-tests.mjs
@@ -1,4 +1,4 @@
import { BenchmarkRunner } from "../resources/benchmark-runner.mjs";
import { BenchmarkRunner, SuiteRunner } from "../resources/benchmark-runner.mjs";
import { defaultParams } from "../resources/params.mjs";

function TEST_FIXTURE(name) {
@@ -112,9 +112,9 @@ describe("BenchmarkRunner", () => {
let _runSuiteStub, _finalizeStub, _loadFrameStub, _appendFrameStub, _removeFrameStub;

before(async () => {
_runSuiteStub = stub(runner, "_runSuite").callsFake(async () => null);
_runSuiteStub = stub(SuiteRunner.prototype, "_runSuite").callsFake(async () => null);
_finalizeStub = stub(runner, "_finalize").callsFake(async () => null);
_loadFrameStub = stub(runner, "_loadFrame").callsFake(async () => null);
_loadFrameStub = stub(SuiteRunner.prototype, "_loadFrame").callsFake(async () => null);
_appendFrameStub = stub(runner, "_appendFrame").callsFake(async () => null);
_removeFrameStub = stub(runner, "_removeFrame").callsFake(() => null);
for (const suite of runner._suites)
@@ -148,18 +148,19 @@ describe("BenchmarkRunner", () => {
});

describe("runSuite", () => {
let _prepareSuiteSpy, _loadFrameStub, _runTestAndRecordResultsStub, _suitePrepareSpy, performanceMarkSpy;
let _prepareSuiteSpy, _loadFrameStub, _runTestAndRecordResultsStub, _validateSuiteTotalStub, _suitePrepareSpy, performanceMarkSpy;

const suite = SUITES_FIXTURE[0];

before(async () => {
_prepareSuiteSpy = spy(runner, "_prepareSuite");
_loadFrameStub = stub(runner, "_loadFrame").callsFake(async () => null);
_runTestAndRecordResultsStub = stub(runner, "_runTestAndRecordResults").callsFake(async () => null);
_prepareSuiteSpy = spy(SuiteRunner.prototype, "_prepareSuite");
_loadFrameStub = stub(SuiteRunner.prototype, "_loadFrame").callsFake(async () => null);
_runTestAndRecordResultsStub = stub(SuiteRunner.prototype, "_runTestAndRecordResults").callsFake(async () => null);
_validateSuiteTotalStub = stub(SuiteRunner.prototype, "_validateSuiteTotal").callsFake(async () => null);
performanceMarkSpy = spy(window.performance, "mark");
_suitePrepareSpy = spy(suite, "prepare");

runner.runSuite(suite);
await runner.runSuite(suite);
});

it("should prepare the suite first", async () => {
@@ -170,6 +171,7 @@

it("should run and record results for every test in suite", async () => {
assert.calledThrice(_runTestAndRecordResultsStub);
assert.calledOnce(_validateSuiteTotalStub);
assert.calledWith(performanceMarkSpy, "suite-Suite 1-prepare-start");
assert.calledWith(performanceMarkSpy, "suite-Suite 1-prepare-end");
assert.calledWith(performanceMarkSpy, "suite-Suite 1-start");
@@ -188,7 +190,8 @@
before(async () => {
await runner._appendFrame();
performanceMarkSpy = spy(window.performance, "mark");
await runner._runTestAndRecordResults(suite, suite.tests[0]);
const suiteRunner = new SuiteRunner(runner._measuredValues, runner._frame, runner._page, runner._client, runner._suite);
await suiteRunner._runTestAndRecordResults(suite, suite.tests[0]);
});

it("should run client pre and post hooks if present", () => {
@@ -222,7 +225,8 @@
stubPerformanceNowCalls(syncStart, syncEnd, asyncStart, asyncEnd);

// instantiate recorded test results
await runner._runTestAndRecordResults(suite, suite.tests[0]);
const suiteRunner = new SuiteRunner(runner._measuredValues, runner._frame, runner._page, runner._client, runner._suite);
await suiteRunner._runTestAndRecordResults(suite, suite.tests[0]);

await runner._finalize();
});
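The test changes follow directly from the refactoring: since runSuite() now constructs its own SuiteRunner internally, instance-level stubs on the runner no longer intercept the suite methods, so the tests stub SuiteRunner.prototype instead. A minimal sketch of that pattern, assuming the same sinon stub helper the test file already uses (how stub is actually imported in the real test setup is not shown here):

import { stub } from "sinon"; // assumption; the tests may obtain stub from shared setup
import { SuiteRunner } from "../resources/benchmark-runner.mjs";

// Stub on the prototype so the instance created inside runSuite() is affected too.
const runSuiteStub = stub(SuiteRunner.prototype, "_runSuite").callsFake(async () => null);

// ... exercise the code under test, e.g. await runner.runSuite(suite) ...

runSuiteStub.restore(); // undo the prototype patch so later tests see the real method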
