Bug 1692821 - Migrate webaudio to browsertime desktop r=perftest-reviewers,Bebe

Latest build: https://treeherder.mozilla.org/#/jobs?repo=try&revision=e3aa1b7f74d905e8565b4afb2f62dc7b984381da
Please leave the landing to the author of the patch!

Differential Revision: https://phabricator.services.mozilla.com/D106930
This commit is contained in:
Alex Ionescu 2021-03-25 12:29:05 +00:00
parent 866eb1e826
commit b11a35b555
8 changed files with 86 additions and 57 deletions

View File

@ -142,6 +142,10 @@ browsertime-benchmark:
- [motionmark-animometer, mm-a]
- [motionmark-htmlsuite, mm-h]
- [stylebench, sb]
- [webaudio, wa]
variants: ["fission"]
apps: ["firefox"]
treeherder-symbol: Btime()
tier:
by-app:
firefox:
@ -168,8 +172,6 @@ browsertime-benchmark:
windows10.*shippable.*: ["trunk", "mozilla-beta"]
default: []
default: []
apps: ["firefox"]
treeherder-symbol: Btime()
browsertime-tp6-live:
<<: *tp6-defaults

View File

@ -233,6 +233,7 @@ raptor-webaudio-firefox:
variants: ["fission"]
try-name: raptor-webaudio-firefox
treeherder-symbol: Rap(wa)
run-on-projects: []
tier: 1
mozharness:
extra-options:
@ -242,6 +243,7 @@ raptor-webaudio-firefox-profiling:
description: "Raptor WebAudio on Firefox with Gecko Profiling"
try-name: raptor-webaudio-firefox
treeherder-symbol: Rap-Prof(wa)
run-on-projects: []
max-run-time: 900
mozharness:
extra-options:

View File

@ -222,7 +222,9 @@ class Browsertime(Perftest):
browsertime_script.extend(["--browsertime.url", test["test_url"]])
# Raptor's `pageCycleDelay` delay (ms) between pageload cycles
browsertime_script.extend(["--browsertime.page_cycle_delay", "1000"])
browsertime_script.extend(
["--browsertime.page_cycle_delay", str(self.post_startup_delay)]
)
# Raptor's `post startup delay` is settle time after the browser has started
browsertime_script.extend(

View File

@ -806,6 +806,59 @@ class PerftestOutput(object):
return subtests, vals
def parseWebaudioOutput(self, test):
    """Parse webaudio benchmark results into subtests and summary values.

    Each benchmark 'index' becomes a subtest; each pagecycle / iteration
    of the test has multiple values per index/subtest.

    This is the format we receive the results in from the benchmark,
    i.e. this is ONE pagecycle of webaudio:
    {u'name': u'raptor-webaudio-firefox', u'type': u'benchmark', u'measurements':
    {u'webaudio': [[u'[{"name":"Empty testcase","duration":26,"buffer":{}},{"name"
    :"Simple gain test without resampling","duration":66,"buffer":{}}, ... ]]}}

    :param test: dict carrying 'measurements' (with a 'webaudio' key whose
        entries each hold a JSON-encoded list of {name, duration} items),
        plus 'subtest_unit', 'alert_threshold' and 'subtest_lower_is_better'.
    :returns: tuple (subtests, vals) — subtests is a list of per-subtest
        dicts (with median 'value'), vals is a list of [median, name]
        pairs; both ordered by subtest name, reverse-alphabetical.
    """
    _subtests = {}
    data = test["measurements"]["webaudio"]
    for page_cycle in data:
        # each page cycle carries its results JSON-encoded; decode into a
        # separate local so we don't rebind the list we are iterating over
        cycle_results = json.loads(page_cycle[0])
        for item in cycle_results:
            # for each pagecycle, build a list of subtests and append all
            # related replicates
            sub = item["name"]
            replicates = [item["duration"]]
            if sub not in _subtests:
                # subtest not added yet, first pagecycle, so add new one
                _subtests[sub] = {
                    "unit": test["subtest_unit"],
                    "alertThreshold": float(test["alert_threshold"]),
                    "lowerIsBetter": test["subtest_lower_is_better"],
                    "name": sub,
                    "replicates": [],
                }
            # pylint: disable=W1633
            _subtests[sub]["replicates"].extend(
                [float(round(x, 3)) for x in replicates]
            )

    vals = []
    subtests = []
    # summarize each subtest with the median of its replicates
    for name in sorted(_subtests, reverse=True):
        _subtests[name]["value"] = filters.median(_subtests[name]["replicates"])
        subtests.append(_subtests[name])
        vals.append([_subtests[name]["value"], name])

    return subtests, vals
class RaptorOutput(PerftestOutput):
"""class for raptor output"""
@ -1212,59 +1265,6 @@ class RaptorOutput(PerftestOutput):
return subtests, vals
def parseWebaudioOutput(self, test):
    """Parse webaudio benchmark results into subtests and summary values.

    Each benchmark 'index' becomes a subtest; each pagecycle / iteration
    of the test has multiple values per index/subtest.

    This is the format we receive the results in from the benchmark,
    i.e. this is ONE pagecycle of webaudio:
    {u'name': u'raptor-webaudio-firefox', u'type': u'benchmark', u'measurements':
    {u'webaudio': [[u'[{"name":"Empty testcase","duration":26,"buffer":{}},{"name"
    :"Simple gain test without resampling","duration":66,"buffer":{}}, ... ]]}}

    :param test: dict carrying 'measurements' (with a 'webaudio' key whose
        entries each hold a JSON-encoded list of {name, duration} items),
        plus 'subtest_unit', 'alert_threshold' and 'subtest_lower_is_better'.
    :returns: tuple (subtests, vals) — subtests is a list of per-subtest
        dicts (with median 'value'), vals is a list of [median, name]
        pairs; both ordered by subtest name, reverse-alphabetical.
    """
    _subtests = {}
    data = test["measurements"]["webaudio"]
    for page_cycle in data:
        # each page cycle carries its results JSON-encoded; decode into a
        # separate local so we don't rebind the list we are iterating over
        cycle_results = json.loads(page_cycle[0])
        for item in cycle_results:
            # for each pagecycle, build a list of subtests and append all
            # related replicates
            sub = item["name"]
            replicates = [item["duration"]]
            if sub not in _subtests:
                # subtest not added yet, first pagecycle, so add new one
                _subtests[sub] = {
                    "unit": test["subtest_unit"],
                    "alertThreshold": float(test["alert_threshold"]),
                    "lowerIsBetter": test["subtest_lower_is_better"],
                    "name": sub,
                    "replicates": [],
                }
            # pylint: disable=W1633
            _subtests[sub]["replicates"].extend(
                [float(round(x, 3)) for x in replicates]
            )

    vals = []
    subtests = []
    # summarize each subtest with the median of its replicates
    for name in sorted(_subtests, reverse=True):
        _subtests[name]["value"] = filters.median(_subtests[name]["replicates"])
        subtests.append(_subtests[name])
        vals.append([_subtests[name]["value"], name])

    return subtests, vals
def parseSunspiderOutput(self, test):
_subtests = {}
data = test["measurements"]["sunspider"]
@ -1560,6 +1560,8 @@ class BrowsertimeOutput(PerftestOutput):
subtests, vals = self.parseYoutubePlaybackPerformanceOutput(test)
if "unity-webgl" in test["name"]:
subtests, vals = self.parseUnityWebGLOutput(test)
if "webaudio" in test["measurements"]:
subtests, vals = self.parseWebaudioOutput(test)
if subtests is None:
raise Exception("No benchmark metrics found in browsertime results")

View File

@ -52,3 +52,4 @@
[include:tests/benchmarks/motionmark-animometer.ini]
[include:tests/benchmarks/motionmark-htmlsuite.ini]
[include:tests/benchmarks/stylebench.ini]
[include:tests/benchmarks/webaudio.ini]

View File

@ -0,0 +1,19 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# webaudio benchmark for firefox and chromium distributions
[DEFAULT]
type = benchmark
test_url = http://<host>:<port>/webaudio/index.html?raptor
page_cycles = 5
page_timeout = 360000
apps = firefox, chrome, chromium
unit = score
lower_is_better = true
alert_threshold = 2.0
gecko_profile_interval = 1
gecko_profile_entries = 4000000
[webaudio]

View File

@ -678,7 +678,7 @@ Utilities.extendObject(window.benchmarkController, {
tpRecordTime(values.join(','), 0, fullNames.join(','));
}
if (this.raptor) {
_data = ['raptor-benchmark', 'motionmark', item['testsResults']];
_data = ['raptor-benchmark', 'motionmark', item['testsResults']];
window.postMessage(_data, '*');
}

View File

@ -151,6 +151,7 @@ function allDone() {
if (location.search.includes("raptor")) {
var _data = ['raptor-benchmark', 'webaudio', JSON.stringify(results)];
window.postMessage(_data, '*');
window.sessionStorage.setItem('benchmark_results', JSON.stringify(_data));
} else {
var xhr = new XMLHttpRequest();
xhr.open("POST", "/results", true);