Bug 1650363 - Performance testing of HTTP3 in Firefox using only xpcshell tests r=sparky

Differential Revision: https://phabricator.services.mozilla.com/D84021
Tarek Ziadé 2020-08-02 17:28:57 +00:00
parent f08a5f293b
commit 03cbd4d536
15 changed files with 493 additions and 56 deletions

View File

@@ -0,0 +1,234 @@
"use strict";
var performance = performance || {};
performance.now = (function() {
  return (
    performance.now ||
    performance.mozNow ||
    performance.msNow ||
    performance.oNow ||
    performance.webkitNow ||
    Date.now
  );
})();

let h3Route;
let httpsOrigin;
let h3AltSvc;
let prefs;

let tests = [
  // This test must run first because it sets up the alt-svc connection
  // that the other tests use.
  test_https_alt_svc,
  test_download,
  testsDone,
];

let current_test = 0;

function run_next_test() {
  if (current_test < tests.length) {
    dump("starting test number " + current_test + "\n");
    tests[current_test]();
    current_test++;
  }
}

function run_test() {
  let env = Cc["@mozilla.org/process/environment;1"].getService(
    Ci.nsIEnvironment
  );
  let h2Port = env.get("MOZHTTP2_PORT");
  Assert.notEqual(h2Port, null);
  Assert.notEqual(h2Port, "");
  let h3Port = env.get("MOZHTTP3_PORT");
  Assert.notEqual(h3Port, null);
  Assert.notEqual(h3Port, "");
  h3AltSvc = ":" + h3Port;
  h3Route = "foo.example.com:" + h3Port;

  do_get_profile();
  prefs = Cc["@mozilla.org/preferences-service;1"].getService(Ci.nsIPrefBranch);
  prefs.setBoolPref("network.http.http3.enabled", true);
  prefs.setCharPref("network.dns.localDomains", "foo.example.com");

  // The certificate for the http3 server is for foo.example.com and is
  // signed by http2-ca.pem, so add that cert to the trust list as a
  // signing cert.
  let certdb = Cc["@mozilla.org/security/x509certdb;1"].getService(
    Ci.nsIX509CertDB
  );
  addCertFromFile(certdb, "http2-ca.pem", "CTu,u,u");
  httpsOrigin = "https://foo.example.com:" + h2Port + "/";

  run_next_test();
}

function makeChan(uri) {
  let chan = NetUtil.newChannel({
    uri,
    loadUsingSystemPrincipal: true,
  }).QueryInterface(Ci.nsIHttpChannel);
  chan.loadFlags = Ci.nsIChannel.LOAD_INITIAL_DOCUMENT_URI;
  return chan;
}
let Http3CheckListener = function() {};

Http3CheckListener.prototype = {
  onDataAvailableFired: false,
  expectedRoute: "",

  onStartRequest: function testOnStartRequest(request) {
    Assert.ok(request instanceof Ci.nsIHttpChannel);
    Assert.equal(request.status, Cr.NS_OK);
    Assert.equal(request.responseStatus, 200);
  },

  onDataAvailable: function testOnDataAvailable(request, stream, off, cnt) {
    this.onDataAvailableFired = true;
    read_stream(stream, cnt);
  },

  onStopRequest: function testOnStopRequest(request, status) {
    dump("status is " + status + "\n");
    Assert.equal(status, Cr.NS_OK);
    let routed = "NA";
    try {
      routed = request.getRequestHeader("Alt-Used");
    } catch (e) {}
    dump("routed is " + routed + "\n");
    Assert.equal(routed, this.expectedRoute);

    let httpVersion = "";
    try {
      httpVersion = request.protocolVersion;
    } catch (e) {}
    Assert.equal(httpVersion, "h3");
    Assert.equal(this.onDataAvailableFired, true);
  },
};

let WaitForHttp3Listener = function() {};

WaitForHttp3Listener.prototype = new Http3CheckListener();
WaitForHttp3Listener.prototype.uri = "";
WaitForHttp3Listener.prototype.h3AltSvc = "";

WaitForHttp3Listener.prototype.onStopRequest = function testOnStopRequest(
  request,
  status
) {
  Assert.equal(status, Cr.NS_OK);
  let routed = "NA";
  try {
    routed = request.getRequestHeader("Alt-Used");
  } catch (e) {}
  dump("routed is " + routed + "\n");

  if (routed == this.expectedRoute) {
    Assert.equal(routed, this.expectedRoute); // always true, but a useful log
    let httpVersion = "";
    try {
      httpVersion = request.protocolVersion;
    } catch (e) {}
    Assert.equal(httpVersion, "h3");
    run_next_test();
  } else {
    dump("poll later for alt svc mapping\n");
    do_test_pending();
    do_timeout(500, () => {
      doTest(this.uri, this.expectedRoute, this.h3AltSvc);
    });
  }

  do_test_finished();
};
function doTest(uri, expectedRoute, altSvc) {
  let chan = makeChan(uri);
  let listener = new WaitForHttp3Listener();
  listener.uri = uri;
  listener.expectedRoute = expectedRoute;
  listener.h3AltSvc = altSvc;
  chan.setRequestHeader("x-altsvc", altSvc, false);
  chan.asyncOpen(listener);
}

// Test Alt-Svc for HTTP/3: the H2 server responds with
// "Alt-Svc: h3-27=:<h3 port>".
function test_https_alt_svc() {
  dump("test_https_alt_svc()\n");
  do_test_pending();
  doTest(httpsOrigin + "http3-test", h3Route, h3AltSvc);
}

let PerfHttp3Listener = function() {};

PerfHttp3Listener.prototype = new Http3CheckListener();
PerfHttp3Listener.prototype.amount = 0;
PerfHttp3Listener.prototype.bytesRead = 0;
PerfHttp3Listener.prototype.startTime = 0;

PerfHttp3Listener.prototype.onStartRequest = function testOnStartRequest(
  request
) {
  this.startTime = performance.now();
  Http3CheckListener.prototype.onStartRequest.call(this, request);
};

PerfHttp3Listener.prototype.onDataAvailable = function testOnDataAvailable(
  request,
  stream,
  off,
  cnt
) {
  this.bytesRead += cnt;
  Http3CheckListener.prototype.onDataAvailable.call(
    this,
    request,
    stream,
    off,
    cnt
  );
};

PerfHttp3Listener.prototype.onStopRequest = function testOnStopRequest(
  request,
  status
) {
  let stopTime = performance.now();
  Http3CheckListener.prototype.onStopRequest.call(this, request, status);
  Assert.equal(this.bytesRead, this.amount);
  let speed = (this.bytesRead * 1000) / (stopTime - this.startTime);
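  // performance.now() is in milliseconds, so speed is bytes per second.
  // The "perfMetrics" log line below is what mozperftest's xpcshell layer
  // picks up to build its perfherder payload.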
info("perfMetrics", { speed });
run_next_test();
do_test_finished();
};

function test_download() {
  dump("test_download()\n");
  let listener = new PerfHttp3Listener();
  listener.expectedRoute = h3Route;
  listener.amount = 1024 * 1024;
  let chan = makeChan(httpsOrigin + listener.amount.toString());
  chan.asyncOpen(listener);
  do_test_pending();
}

function testsDone() {
  prefs.clearUserPref("network.http.http3.enabled");
  prefs.clearUserPref("network.dns.localDomains");
  dump("testsDone\n");
  do_test_pending();
  do_test_finished();
}
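
(For a local run, something like `./mach perftest netwerk/test/unit/test_http3_perf.js --flavor xpcshell --perfherder --perfherder-metrics name:speed,unit:bps` should exercise this file; the exact flags used in CI are spelled out in the taskcluster jobs at the end of this commit.)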

View File

@@ -427,6 +427,8 @@ skip-if = os == "android"
skip-if = asan || tsan || os == 'win' || os =='android'
[test_http3_421.js]
skip-if = asan || tsan || os == 'win' || os =='android'
[test_http3_perf.js]
skip-if = asan || tsan || os == 'win' || os =='android'
[test_node_execute.js]
[test_loadgroup_cancel.js]
[test_obs-fold.js]

View File

@@ -522,7 +522,35 @@ ARCHIVE_FILES = {
        },
        {
            'source': buildconfig.topsrcdir,
            'pattern': 'testing/mozharness/**'
            'pattern': 'testing/mozharness/**'
        },
        {
            'source': buildconfig.topsrcdir,
            'pattern': 'browser/config/**'
        },
        {
            'source': buildconfig.topsrcdir,
            'pattern': 'build/moz.configure/**'
        },
        {
            'source': buildconfig.topobjdir,
            'pattern': 'dist/bin/**',
        },
        {
            'source': buildconfig.topobjdir,
            'base': '_tests/modules',
            'pattern': '**',
            'dest': 'dist/bin/modules'
        },
        {
            'source': buildconfig.topobjdir,
            'pattern': 'dist/plugins/**'
        },
        {
            'source': buildconfig.topsrcdir,
            'base': 'netwerk/test/http3serverDB',
            'pattern': '**',
            'dest': 'netwerk/test/http3serverDB',
        }
    ],
    'condprof': [

View File

@@ -11,7 +11,7 @@ try:
    from mozbuild.base import MozbuildObject, MachCommandConditions as conditions

    build_obj = MozbuildObject.from_environment(cwd=here)
except ImportError:
except Exception:
    build_obj = None
    conditions = None

View File

@@ -11,7 +11,7 @@ from mozbuild.base import MachCommandBase, MachCommandConditions as conditions

_TRY_PLATFORMS = {"g5": "perftest-android-hw-g5", "p2": "perftest-android-hw-p2"}

ON_TRY = "MOZ_AUTOMATION" in os.environ
HERE = os.path.dirname(__file__)


def get_perftest_parser():
@@ -124,9 +124,18 @@ class PerftestTests(MachCommandBase):
    def run_tests(self, **kwargs):
        MachCommandBase.activate_virtualenv(self)

        from pathlib import Path
        from mozperftest.utils import temporary_env
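        # COVERAGE_RCFILE is coverage.py's standard environment variable for
        # pointing at a config file, and RUNNING_TESTS is read back in
        # utils.py so mozperftest code can tell it is being driven by its
        # own test runner.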
        with temporary_env(
            COVERAGE_RCFILE=str(Path(HERE, ".coveragerc")), RUNNING_TESTS="YES"
        ):
            self._run_tests(**kwargs)

    def _run_tests(self, **kwargs):
        from pathlib import Path
        from mozperftest.runner import _setup_path
        from mozperftest.utils import install_package, temporary_env
        from mozperftest.utils import install_package, ON_TRY

        skip_linters = kwargs.get("skip_linters", False)
        verbose = kwargs.get("verbose", False)
@@ -157,21 +166,20 @@ class PerftestTests(MachCommandBase):
        for dep in vendors:
            install_package(self.virtualenv_manager, str(Path(pydeps, dep)))

        here = Path(__file__).parent.resolve()

        if not ON_TRY and not skip_linters:
            # formatting the code with black
            assert self._run_python_script("black", str(here))
            assert self._run_python_script("black", str(HERE))

        # checking flake8 correctness
        if not (ON_TRY and sys.platform == "darwin") and not skip_linters:
            assert self._run_python_script("flake8", str(here))
            assert self._run_python_script("flake8", str(HERE))

        # running pytest with coverage
        # coverage is done in three steps:
        # 1/ coverage erase => erase any previous coverage data
        # 2/ coverage run pytest ... => run the tests and collect info
        # 3/ coverage report => generate the report
        tests_dir = Path(here, "tests").resolve()
        tests_dir = Path(HERE, "tests").resolve()
        tests = kwargs.get("tests", [])
        if tests == []:
            tests = str(tests_dir)
@@ -192,21 +200,20 @@
        if kwargs.get("verbose"):
            options += "v"

        with temporary_env(COVERAGE_RCFILE=str(here / ".coveragerc")):
            if run_coverage_check:
                assert self._run_python_script(
                    "coverage", "erase", label="remove old coverage data"
                )

            args = [
                "run",
                pytest.__file__,
                options,
                tests,
            ]

            assert self._run_python_script(
                "coverage", *args, label="running tests", verbose=verbose
            )
            if run_coverage_check and not self._run_python_script(
                "coverage", "report", display=True
            ):
                raise ValueError("Coverage is too low!")
        if run_coverage_check:
            assert self._run_python_script(
                "coverage", "erase", label="remove old coverage data"
            )

        args = [
            "run",
            pytest.__file__,
            options,
            tests,
        ]

        assert self._run_python_script(
            "coverage", *args, label="running tests", verbose=verbose
        )
        if run_coverage_check and not self._run_python_script(
            "coverage", "report", display=True
        ):
            raise ValueError("Coverage is too low!")

View File

@@ -28,10 +28,13 @@ import os
import shutil
import sys
import logging
from pathlib import Path
HERE = os.path.dirname(__file__)
SRC_ROOT = os.path.join(HERE, "..", "..", "..")
TASKCLUSTER = "TASK_ID" in os.environ.keys()
RUNNING_TESTS = "RUNNING_TESTS" in os.environ.keys()
HERE = Path(__file__).parent
SRC_ROOT = Path(HERE, "..", "..", "..").resolve()
SEARCH_PATHS = [
"python/mach",
"python/mozboot",
@@ -48,6 +51,7 @@ SEARCH_PATHS = [
    "testing/mozbase/mozprofile",
    "testing/mozbase/mozproxy",
    "third_party/python/attrs/src",
    "third_party/python/blessings",
    "third_party/python/distro",
    "third_party/python/dlmanager",
    "third_party/python/esprima",
@@ -62,23 +66,25 @@
]

if TASKCLUSTER:
    SEARCH_PATHS.append("xpcshell")

# XXX need to make this work for all system flavors
if "SHELL" not in os.environ:
    os.environ["SHELL"] = "/bin/bash"


def _setup_path():
    """Adds all dependencies in the path.
    """Adds all available dependencies in the path.

    This is done so the runner can be used with no prior
    install in all execution environments.
    """
    for path in SEARCH_PATHS:
        path = os.path.abspath(path)
        path = os.path.join(SRC_ROOT, path)
        if not os.path.exists(path):
            raise IOError("Can't find %s" % path)
        sys.path.insert(0, path)
        path = Path(SRC_ROOT, path).resolve()
        if path.exists():
            sys.path.insert(0, str(path))
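
    # Note: missing directories are now skipped instead of raising IOError,
    # since the tests archive shipped to CI workers only carries a subset
    # of SEARCH_PATHS.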
def run_tests(mach_cmd, **kwargs):
@@ -150,16 +156,41 @@
    """
    _setup_path()

    from mozbuild.mozconfig import MozconfigLoader
    from mozbuild.base import MachCommandBase, MozbuildObject
    from mozperftest import PerftestArgumentParser
    from mozboot.util import get_state_dir
    from mach.logging import LoggingManager

    config = MozbuildObject.from_environment()
    mozconfig = SRC_ROOT / "browser" / "config" / "mozconfig"
    if mozconfig.exists():
        os.environ["MOZCONFIG"] = str(mozconfig)
if "--xpcshell-mozinfo" in argv:
mozinfo = argv[argv.index("--xpcshell-mozinfo") + 1]
topobjdir = Path(mozinfo).parent
else:
topobjdir = None
config = MozbuildObject(
str(SRC_ROOT),
None,
LoggingManager(),
topobjdir=topobjdir,
mozconfig=MozconfigLoader.AUTODETECT,
)
config.topdir = config.topsrcdir
config.cwd = os.getcwd()
config.state_dir = get_state_dir()
config.log_manager = LoggingManager()
# This monkey patch forces mozbuild to reuse
# our configuration when it tries to re-create
# it from the environment.
def _here(*args, **kw):
return config
MozbuildObject.from_environment = _here
mach_cmd = MachCommandBase(config)
parser = PerftestArgumentParser(description="vanilla perftest")
args = parser.parse_args(args=argv)

View File

@@ -1,9 +1,9 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from pathlib import Path
from collections import defaultdict

from mozperftest.layers import Layer
from mozperftest.utils import temp_dir, silence
@@ -39,6 +39,22 @@ class XPCShell(Layer):
    arguments = {
        "cycles": {"type": int, "default": 13, "help": "Number of full cycles"},
        "binary": {
            "type": str,
            "default": None,
            "help": (
                "xpcshell binary path. If not provided, "
                "looks for it in the source tree."
            ),
        },
        "mozinfo": {
            "type": str,
            "default": None,
            "help": (
                "mozinfo.json path. If not provided, looks for it in the obj tree."
            ),
        },
        "nodejs": {"type": str, "default": None, "help": "nodejs binary path."},
    }
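
    # These surface on the command line prefixed with the layer name:
    # --xpcshell-binary, --xpcshell-mozinfo, --xpcshell-nodejs and
    # --xpcshell-cycles (see the taskcluster jobs in this commit).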

    def __init__(self, env, mach_cmd):
@@ -51,6 +67,7 @@
        self.bindir = mach_cmd.bindir
        self.statedir = mach_cmd.statedir
        self.metrics = []
        self.topsrcdir = mach_cmd.topsrcdir

    def setup(self):
        self.mach_cmd.activate_virtualenv()
@@ -70,31 +87,63 @@
        if not manifest.exists():
            raise FileNotFoundError(str(manifest))

        nodejs = self.get_arg("nodejs")
        if nodejs is not None:
            os.environ["MOZ_NODE_PATH"] = nodejs

        import runxpcshelltests

        verbose = self.get_arg("verbose")
        xpcshell = runxpcshelltests.XPCShellTests(log=self)
        kwargs = {}
        kwargs["testPaths"] = test.name
        kwargs["verbose"] = True
        kwargs["xpcshell"] = self.mach_cmd.get_binary_path("xpcshell")
        kwargs["mozInfo"] = str(Path(self.topobjdir, "mozinfo.json"))
        kwargs["verbose"] = verbose

        binary = self.get_arg("binary")
        if binary is None:
            binary = self.mach_cmd.get_binary_path("xpcshell")
        kwargs["xpcshell"] = binary
        binary = Path(binary)

        mozinfo = self.get_arg("mozinfo")
        if mozinfo is None:
            mozinfo = binary.parent / ".." / "mozinfo.json"
            if not mozinfo.exists():
                mozinfo = Path(self.topobjdir, "mozinfo.json")
        else:
            mozinfo = Path(mozinfo)
        kwargs["mozInfo"] = str(mozinfo)
kwargs["symbolsPath"] = str(Path(self.distdir, "crashreporter-symbols"))
kwargs["logfiles"] = True
kwargs["profileName"] = "firefox"
kwargs["pluginsPath"] = str(Path(self.distdir, "plugins"))
kwargs["testingModulesDir"] = str(Path(self.topobjdir, "_tests/modules"))
plugins = binary.parent / "plugins"
if not plugins.exists():
plugins = Path(self.distdir, "plugins")
kwargs["pluginsPath"] = str(plugins)
modules = binary.parent / "modules"
if not modules.exists():
modules = Path(self.topobjdir, "_tests", "modules")
kwargs["testingModulesDir"] = str(modules)
kwargs["utility_path"] = self.bindir
kwargs["manifest"] = manifest
kwargs["totalChunks"] = 1
cycles = self.get_arg("cycles", 1)
self.info("Running %d cycles" % cycles)

        class _display:
            def __enter__(self, *args, **kw):
                return self

            def __exit__(self, *args, **kw):
                # must not return a truthy value here, or exceptions raised
                # inside the with block (like XPCShellTestError) would be
                # swallowed
                return None

        may_silence = not verbose and silence or _display
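        # i.e. capture the xpcshell output with silence() unless --verbose
        # was passed, in which case _display is a do-nothing stand-in.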

        for cycle in range(cycles):
            with temp_dir() as tmp, silence():
            self.info("Cycle %d" % (cycle + 1))
            with temp_dir() as tmp, may_silence():
                kwargs["tempDir"] = tmp
                if not xpcshell.runTests(kwargs):
                    raise XPCShellTestError()
                self.info("Cycle %d" % (cycle + 1))

        self.info("tests done.")
        results = defaultdict(list)
@@ -105,7 +154,7 @@
        metadata.add_result(
            {
                "name": test.name,
                "framework": {"name": "xpcshell"},
                "framework": {"name": "mozperftest"},
                "transformer": "mozperftest.test.xpcshell:XPCShellData",
                "results": [
                    {"values": measures, "name": subtest}

View File

@@ -0,0 +1 @@
{"appname": "firefox", "artifact": false, "asan": false, "bin_suffix": "", "bits": 64, "buildapp": "browser", "buildtype_guess": "opt", "cc_type": "clang", "ccov": false, "crashreporter": true, "datareporting": true, "debug": false, "devedition": false, "early_beta_or_earlier": true, "healthreport": true, "mozconfig": "/Users/tarek/Dev/gecko/mozilla-central-opt/browser/config/mozconfig", "nightly_build": true, "normandy": true, "official": false, "os": "mac", "pgo": false, "platform_guess": "macosx64", "processor": "x86_64", "release_or_beta": false, "require_signing": false, "stylo": true, "sync": true, "telemetry": false, "tests_enabled": true, "toolkit": "cocoa", "topsrcdir": "/Users/tarek/Dev/gecko/mozilla-central-opt", "tsan": false, "ubsan": false, "updater": true}

View File

@@ -0,0 +1 @@
#

View File

@@ -16,6 +16,7 @@ EXAMPLE_TEST = os.path.join(EXAMPLE_TESTS_DIR, "perftest_example.js")
EXAMPLE_XPCSHELL_TEST = Path(EXAMPLE_TESTS_DIR, "test_xpcshell.js")
BT_DATA = Path(HERE, "data", "browsertime-results", "browsertime.json")
DMG = Path(HERE, "data", "firefox.dmg")
MOZINFO = Path(HERE, "data", "mozinfo.json")
@contextlib.contextmanager

View File

@@ -17,10 +17,10 @@ Registrar.commands_by_category = {"testing": set()}
from mozperftest.environment import MachEnvironment # noqa
from mozperftest.mach_commands import Perftest, PerftestTests, ON_TRY # noqa
from mozperftest import mach_commands # noqa
from mozperftest.mach_commands import Perftest, PerftestTests # noqa
from mozperftest import utils # noqa
from mozperftest.tests.support import EXAMPLE_TESTS_DIR # noqa
from mozperftest.utils import temporary_env, silence # noqa
from mozperftest.utils import temporary_env, silence, ON_TRY # noqa
ITERATION_HOOKS = Path(__file__).parent / "data" / "hooks_iteration.py"
@@ -111,14 +111,14 @@ def test_doc_flavor(mocked_func):
@mock.patch("mozperftest.mach_commands.PerftestTests._run_python_script")
def test_test_runner(*mocked):
    # simulate running on try so the path parsing is exercised
    old = mach_commands.ON_TRY
    mach_commands.ON_TRY = True
    old = utils.ON_TRY
    utils.ON_TRY = True
    with _get_command(PerftestTests) as test, silence(test), temporary_env(
        MOZ_AUTOMATION="1"
    ):
        test.run_tests(tests=[EXAMPLE_TESTS_DIR])
    mach_commands.ON_TRY = old
    utils.ON_TRY = old
@mock.patch("mozperftest.MachEnvironment", new=_TestMachEnvironment)

View File

@@ -4,9 +4,16 @@ import shutil

import pytest

from mozperftest.tests.support import get_running_env, EXAMPLE_XPCSHELL_TEST, temp_file
from mozperftest.tests.support import (
    get_running_env,
    EXAMPLE_XPCSHELL_TEST,
    temp_file,
    MOZINFO,
)
from mozperftest.environment import TEST, SYSTEM, METRICS
from mozperftest.test.xpcshell import XPCShellTestError
from mozperftest import utils
from mozperftest.test import xpcshell


class XPCShellTests:
@@ -43,9 +50,13 @@ class XPCShellTestsFail(XPCShellTests):
        return False


def running_env(**kw):
    return get_running_env(flavor="xpcshell", xpcshell_mozinfo=MOZINFO, **kw)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_metrics(*mocked):
    mach_cmd, metadata, env = get_running_env(flavor="xpcshell")
    mach_cmd, metadata, env = running_env()

    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]
@@ -68,7 +79,7 @@ def test_xpcshell_metrics(*mocked):
@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTestsFail)
def test_xpcshell_metrics_fail(*mocked):
    mach_cmd, metadata, env = get_running_env(flavor="xpcshell")
    mach_cmd, metadata, env = running_env()

    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]
    env.set_arg("tests", [str(EXAMPLE_XPCSHELL_TEST)])
@@ -82,9 +93,23 @@ def test_xpcshell_metrics_fail(*mocked):
@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_perfherder(*mocked):
    mach_cmd, metadata, env = get_running_env(
        flavor="xpcshell", perfherder=True, xpcshell_cycles=10
    )
    return _test_xpcshell_perfherder(*mocked)


@mock.patch("runxpcshelltests.XPCShellTests", new=XPCShellTests)
def test_xpcshell_perfherder_on_try(*mocked):
    old = utils.ON_TRY
    utils.ON_TRY = xpcshell.ON_TRY = not utils.ON_TRY
    try:
        return _test_xpcshell_perfherder(*mocked)
    finally:
        utils.ON_TRY = old
        xpcshell.ON_TRY = old


def _test_xpcshell_perfherder(*mocked):
    mach_cmd, metadata, env = running_env(perfherder=True, xpcshell_cycles=10)

    sys = env.layers[SYSTEM]
    xpcshell = env.layers[TEST]
@@ -105,7 +130,7 @@ def test_xpcshell_perfherder(*mocked):
    # Check some metadata
    assert output["application"]["name"] == "firefox"
    assert output["framework"]["name"] == "xpcshell"
    assert output["framework"]["name"] == "mozperftest"

    # Check some numbers in our data
    assert len(output["suites"]) == 1

View File

@@ -21,6 +21,7 @@ RETRY_SLEEP = 10
API_ROOT = "https://firefox-ci-tc.services.mozilla.com/api/index/v1"
MULTI_REVISION_ROOT = f"{API_ROOT}/namespaces"
MULTI_TASK_ROOT = f"{API_ROOT}/tasks"
ON_TRY = "MOZ_AUTOMATION" in os.environ
@contextlib.contextmanager

View File

@@ -8,8 +8,13 @@ job-defaults:
        toolchain:
            - linux64-node-10
            - linux64-geckodriver
            - linux64-minidump-stackwalk
            - linux64-fix-stacks
        build:
            - artifact: target.mozinfo.json
            - artifact: target.common.tests.tar.gz
            - artifact: target.perftests.tests.tar.gz
            - artifact: target.xpcshell.tests.tar.gz
            - artifact: target.tar.bz2
    platform: linux64-shippable/opt
    require-build:
@@ -35,6 +40,30 @@ domcount:
            --browsertime-geckodriver ${MOZ_FETCHES_DIR}/geckodriver
            --output $MOZ_FETCHES_DIR/../artifacts

http3:
    description: Run HTTP/3 test
    treeherder:
        symbol: perftest(http3)
    attributes:
        batch: false
        cron: true
    run:
        command: >-
            mkdir -p $MOZ_FETCHES_DIR/../artifacts &&
            cd $MOZ_FETCHES_DIR &&
            python3.8 python/mozperftest/mozperftest/runner.py
            xpcshell/tests/netwerk/test/unit/test_http3_perf.js
            --flavor xpcshell
            --perfherder
            --perfherder-metrics name:speed,unit:bps
            --output $MOZ_FETCHES_DIR/../artifacts
            --xpcshell-binary ${MOZ_FETCHES_DIR}/dist/bin/xpcshell
            --xpcshell-mozinfo ${MOZ_FETCHES_DIR}/target.mozinfo.json
            --xpcshell-nodejs ${MOZ_FETCHES_DIR}/node/bin/node
            --xpcshell-cycles 13
            --verbose
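
# Note: "speed" in --perfherder-metrics has to match the metric name the
# test emits in its perfMetrics log line; the value it reports is computed
# as bytes per second.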

livesites:
    description: Live site performance testing
    variants: [http3]

View File

@@ -6,6 +6,9 @@ job-defaults:
    worker-type: t-osx-1014
    fetches:
        build:
            - artifact: target.mozinfo.json
            - artifact: target.common.tests.tar.gz
            - artifact: target.xpcshell.tests.tar.gz
            - artifact: target.perftests.tests.tar.gz
            - artifact: target.dmg
              extract: false
@@ -38,6 +41,31 @@ domcount:
            --browsertime-geckodriver ${MOZ_FETCHES_DIR}/geckodriver
            --output $MOZ_FETCHES_DIR/../artifacts

http3:
    description: Run HTTP/3 test
    treeherder:
        symbol: perftest(http3)
    attributes:
        batch: false
        cron: true
    run:
        command: >-
            mkdir -p $MOZ_FETCHES_DIR/../artifacts &&
            cd $MOZ_FETCHES_DIR &&
            python3 -m venv . &&
            bin/python3 python/mozperftest/mozperftest/runner.py
            xpcshell/tests/netwerk/test/unit/test_http3_perf.js
            --flavor xpcshell
            --perfherder
            --perfherder-metrics name:speed,unit:bps
            --output $MOZ_FETCHES_DIR/../artifacts
            --xpcshell-binary ${MOZ_FETCHES_DIR}/dist/bin/xpcshell
            --xpcshell-mozinfo ${MOZ_FETCHES_DIR}/target.mozinfo.json
            --xpcshell-nodejs ${MOZ_FETCHES_DIR}/node/bin/node
            --xpcshell-cycles 13
            --verbose

livesites:
    description: Live site performance testing