diff --git a/resources/benchmark-runner.mjs b/resources/benchmark-runner.mjs index 8a28c2e66..e791f4a80 100644 --- a/resources/benchmark-runner.mjs +++ b/resources/benchmark-runner.mjs @@ -454,8 +454,99 @@ export class BenchmarkRunner { } async runSuite(suite) { - await this._prepareSuite(suite); - await this._runSuite(suite); + // FIXME: Encapsulate more state in the SuiteRunner. + // FIXME: Return and use measured values from SuiteRunner. + const suiteRunner = new SuiteRunner(this._measuredValues, this._frame, this._page, this._client, suite); + await suiteRunner.run(); + } + + async _finalize() { + this._appendIterationMetrics(); + if (this._client?.didRunSuites) { + let product = 1; + const values = []; + for (const suiteName in this._measuredValues.tests) { + const suiteTotal = this._measuredValues.tests[suiteName].total; + product *= suiteTotal; + values.push(suiteTotal); + } + + values.sort((a, b) => a - b); // Avoid the loss of significance for the sum. + const total = values.reduce((a, b) => a + b); + const geomean = Math.pow(product, 1 / values.length); + + this._measuredValues.total = total; + this._measuredValues.mean = total / values.length; + this._measuredValues.geomean = geomean; + this._measuredValues.score = geomeanToScore(geomean); + await this._client.didRunSuites(this._measuredValues); + } + } + + _appendIterationMetrics() { + const getMetric = (name, unit = "ms") => this._metrics[name] || (this._metrics[name] = new Metric(name, unit)); + const iterationTotalMetric = (i) => { + if (i >= params.iterationCount) + throw new Error(`Requested iteration=${i} does not exist.`); + return getMetric(`Iteration-${i}-Total`); + }; + + const collectSubMetrics = (prefix, items, parent) => { + for (let name in items) { + const results = items[name]; + const metric = getMetric(prefix + name); + metric.add(results.total ?? 
results); + if (metric.parent !== parent) + parent.addChild(metric); + if (results.tests) + collectSubMetrics(`${metric.name}${Metric.separator}`, results.tests, metric); + } + }; + const initializeMetrics = this._metrics === null; + if (initializeMetrics) + this._metrics = { __proto__: null }; + + const iterationResults = this._measuredValues.tests; + collectSubMetrics("", iterationResults); + + if (initializeMetrics) { + // Prepare all iteration metrics so they are listed at the end + // of the _metrics object, before "Total" and "Score". + for (let i = 0; i < this._iterationCount; i++) + iterationTotalMetric(i).description = `Test totals for iteration ${i}`; + getMetric("Geomean", "ms").description = "Geomean of test totals"; + getMetric("Score", "score").description = "Scaled inverse of the Geomean"; + } + + const geomean = getMetric("Geomean"); + const iterationTotal = iterationTotalMetric(geomean.length); + for (const results of Object.values(iterationResults)) + iterationTotal.add(results.total); + iterationTotal.computeAggregatedMetrics(); + geomean.add(iterationTotal.geomean); + getMetric("Score").add(geomeanToScore(iterationTotal.geomean)); + + for (const metric of Object.values(this._metrics)) + metric.computeAggregatedMetrics(); + } +} + +// FIXME: Create AsyncSuiteRunner subclass. +// FIXME: Create RemoteSuiteRunner subclass. +export class SuiteRunner { + constructor(measuredValues, frame, page, client, suite) { + // FIXME: Create SuiteRunner-local measuredValues. + this._measuredValues = measuredValues; + this._frame = frame; + this._page = page; + this._client = client; + this._suite = suite; + } + + async run() { + // FIXME: use this._suite in all SuiteRunner methods directly. 
+ await this._prepareSuite(this._suite); + await this._runSuite(this._suite); } async _prepareSuite(suite) { @@ -570,74 +661,4 @@ export class BenchmarkRunner { if (this._client?.didRunTest) await this._client.didRunTest(suite, test); } - - async _finalize() { - this._appendIterationMetrics(); - if (this._client?.didRunSuites) { - let product = 1; - const values = []; - for (const suiteName in this._measuredValues.tests) { - const suiteTotal = this._measuredValues.tests[suiteName].total; - product *= suiteTotal; - values.push(suiteTotal); - } - - values.sort((a, b) => a - b); // Avoid the loss of significance for the sum. - const total = values.reduce((a, b) => a + b); - const geomean = Math.pow(product, 1 / values.length); - - this._measuredValues.total = total; - this._measuredValues.mean = total / values.length; - this._measuredValues.geomean = geomean; - this._measuredValues.score = geomeanToScore(geomean); - await this._client.didRunSuites(this._measuredValues); - } - } - - _appendIterationMetrics() { - const getMetric = (name, unit = "ms") => this._metrics[name] || (this._metrics[name] = new Metric(name, unit)); - const iterationTotalMetric = (i) => { - if (i >= params.iterationCount) - throw new Error(`Requested iteration=${i} does not exist.`); - return getMetric(`Iteration-${i}-Total`); - }; - - const collectSubMetrics = (prefix, items, parent) => { - for (let name in items) { - const results = items[name]; - const metric = getMetric(prefix + name); - metric.add(results.total ?? 
results); - if (metric.parent !== parent) - parent.addChild(metric); - if (results.tests) - collectSubMetrics(`${metric.name}${Metric.separator}`, results.tests, metric); - } - }; - const initializeMetrics = this._metrics === null; - if (initializeMetrics) - this._metrics = { __proto__: null }; - - const iterationResults = this._measuredValues.tests; - collectSubMetrics("", iterationResults); - - if (initializeMetrics) { - // Prepare all iteration metrics so they are listed at the end of - // of the _metrics object, before "Total" and "Score". - for (let i = 0; i < this._iterationCount; i++) - iterationTotalMetric(i).description = `Test totals for iteration ${i}`; - getMetric("Geomean", "ms").description = "Geomean of test totals"; - getMetric("Score", "score").description = "Scaled inverse of the Geomean"; - } - - const geomean = getMetric("Geomean"); - const iterationTotal = iterationTotalMetric(geomean.length); - for (const results of Object.values(iterationResults)) - iterationTotal.add(results.total); - iterationTotal.computeAggregatedMetrics(); - geomean.add(iterationTotal.geomean); - getMetric("Score").add(geomeanToScore(iterationTotal.geomean)); - - for (const metric of Object.values(this._metrics)) - metric.computeAggregatedMetrics(); - } } diff --git a/tests/benchmark-runner-tests.mjs b/tests/benchmark-runner-tests.mjs index 4e978dc38..b11bde160 100644 --- a/tests/benchmark-runner-tests.mjs +++ b/tests/benchmark-runner-tests.mjs @@ -1,4 +1,4 @@ -import { BenchmarkRunner } from "../resources/benchmark-runner.mjs"; +import { BenchmarkRunner, SuiteRunner } from "../resources/benchmark-runner.mjs"; import { defaultParams } from "../resources/params.mjs"; function TEST_FIXTURE(name) { @@ -112,9 +112,9 @@ describe("BenchmarkRunner", () => { let _runSuiteStub, _finalizeStub, _loadFrameStub, _appendFrameStub, _removeFrameStub; before(async () => { - _runSuiteStub = stub(runner, "_runSuite").callsFake(async () => null); + _runSuiteStub = 
stub(SuiteRunner.prototype, "_runSuite").callsFake(async () => null); _finalizeStub = stub(runner, "_finalize").callsFake(async () => null); - _loadFrameStub = stub(runner, "_loadFrame").callsFake(async () => null); + _loadFrameStub = stub(SuiteRunner.prototype, "_loadFrame").callsFake(async () => null); _appendFrameStub = stub(runner, "_appendFrame").callsFake(async () => null); _removeFrameStub = stub(runner, "_removeFrame").callsFake(() => null); for (const suite of runner._suites) @@ -148,18 +148,19 @@ describe("BenchmarkRunner", () => { }); describe("runSuite", () => { - let _prepareSuiteSpy, _loadFrameStub, _runTestAndRecordResultsStub, _suitePrepareSpy, performanceMarkSpy; + let _prepareSuiteSpy, _loadFrameStub, _runTestAndRecordResultsStub, _validateSuiteTotalStub, _suitePrepareSpy, performanceMarkSpy; const suite = SUITES_FIXTURE[0]; before(async () => { - _prepareSuiteSpy = spy(runner, "_prepareSuite"); - _loadFrameStub = stub(runner, "_loadFrame").callsFake(async () => null); - _runTestAndRecordResultsStub = stub(runner, "_runTestAndRecordResults").callsFake(async () => null); + _prepareSuiteSpy = spy(SuiteRunner.prototype, "_prepareSuite"); + _loadFrameStub = stub(SuiteRunner.prototype, "_loadFrame").callsFake(async () => null); + _runTestAndRecordResultsStub = stub(SuiteRunner.prototype, "_runTestAndRecordResults").callsFake(async () => null); + _validateSuiteTotalStub = stub(SuiteRunner.prototype, "_validateSuiteTotal").callsFake(async () => null); performanceMarkSpy = spy(window.performance, "mark"); _suitePrepareSpy = spy(suite, "prepare"); - runner.runSuite(suite); + await runner.runSuite(suite); }); it("should prepare the suite first", async () => { @@ -170,6 +171,7 @@ describe("BenchmarkRunner", () => { it("should run and record results for every test in suite", async () => { assert.calledThrice(_runTestAndRecordResultsStub); + assert.calledOnce(_validateSuiteTotalStub); assert.calledWith(performanceMarkSpy, "suite-Suite 1-prepare-start"); 
assert.calledWith(performanceMarkSpy, "suite-Suite 1-prepare-end"); assert.calledWith(performanceMarkSpy, "suite-Suite 1-start"); @@ -188,7 +190,8 @@ describe("BenchmarkRunner", () => { before(async () => { await runner._appendFrame(); performanceMarkSpy = spy(window.performance, "mark"); - await runner._runTestAndRecordResults(suite, suite.tests[0]); + const suiteRunner = new SuiteRunner(runner._measuredValues, runner._frame, runner._page, runner._client, runner._suite); + await suiteRunner._runTestAndRecordResults(suite, suite.tests[0]); }); it("should run client pre and post hooks if present", () => { @@ -222,7 +225,8 @@ describe("BenchmarkRunner", () => { stubPerformanceNowCalls(syncStart, syncEnd, asyncStart, asyncEnd); // instantiate recorded test results - await runner._runTestAndRecordResults(suite, suite.tests[0]); + const suiteRunner = new SuiteRunner(runner._measuredValues, runner._frame, runner._page, runner._client, runner._suite); + await suiteRunner._runTestAndRecordResults(suite, suite.tests[0]); await runner._finalize(); });