diff --git a/servo/etc/ci/performance/README.md b/servo/etc/ci/performance/README.md
index 9c9986f937e9..948e788d7d54 100644
--- a/servo/etc/ci/performance/README.md
+++ b/servo/etc/ci/performance/README.md
@@ -39,14 +39,19 @@ Servo Page Load Time Test
# Add your own test
+* You can add two types of tests: sync tests and async tests
+  * sync test: measures the page load time; the test exits automatically once the page has loaded.
+  * async test: measures custom time markers reported from JavaScript; see `page_load_test/example/example_async.html` and the snippet at the end of this section for an example.
* Add your test case (an HTML file) to the `page_load_test/` folder. For example, we can create a `page_load_test/example/example.html`
* Add a manifest (or modify existing ones) named `page_load_test/example.manifest`
* Add lines like these to the manifest:
```
-http://localhost:8000/page_load_test/example/example.html
-# This is a comment
# Pages are served by a local server at localhost:8000
+# A test case without any flag is a sync test
+http://localhost:8000/page_load_test/example/example_sync.html
+# An async test must start with the `async` flag
+async http://localhost:8000/page_load_test/example/example_async.html
```
* Modify the `MANIFEST=...` line in `test_all.sh` to point to the new manifest file.
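+* In an async test, include the harness script and report your own time
+  markers: call `mark(name)` to record a custom timestamp and `done()` when
+  the test finishes, otherwise the harness will time out. A minimal async
+  test body might look like this (the marker name and relative script path
+  are just examples):
+```
+<script src="../../harness/harness.js"></script>
+<script>
+  mark("myCustomMarker"); // record a custom timestamp
+  done();                 // tell the harness the test has ended
+</script>
+```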
diff --git a/servo/etc/ci/performance/gecko_driver.py b/servo/etc/ci/performance/gecko_driver.py
index fe1450d1d1c6..8632a1d5f58b 100644
--- a/servo/etc/ci/performance/gecko_driver.py
+++ b/servo/etc/ci/performance/gecko_driver.py
@@ -71,7 +71,7 @@ def generate_placeholder(testcase):
    return [timings]


-def run_gecko_test(testcase, timeout):
+def run_gecko_test(testcase, timeout, is_async):
    with create_gecko_session() as driver:
        driver.set_page_load_timeout(timeout)
        try:
@@ -97,6 +97,16 @@ def run_gecko_test(testcase, timeout):
print("Failed to get a valid timing measurement.")
return generate_placeholder(testcase)
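+        # An async test signals completion by inserting a #GECKO_TEST_DONE
+        # element into the DOM (see the done() helper in harness/harness.js)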
+        if is_async:
+            # TODO: the timeout is hardcoded
+            driver.implicitly_wait(5)  # sec
+            driver.find_element_by_id("GECKO_TEST_DONE")
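+            # Merge the page's custom time markers into the timing results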
+            timings.update(json.loads(
+                driver.execute_script(
+                    "return JSON.stringify(window.customTimers)"
+                )
+            ))
+
        return [timings]
diff --git a/servo/etc/ci/performance/harness/harness.js b/servo/etc/ci/performance/harness/harness.js
new file mode 100644
index 000000000000..14edbcd4d1ee
--- /dev/null
+++ b/servo/etc/ci/performance/harness/harness.js
@@ -0,0 +1,22 @@
+window.customTimers = {};
+
+// Create a custom timestamp with a custom name
+function mark(name) {
+    var time;
+    if (window.performance) {
+        // performance.now() is the time elapsed since navigationStart
+        // https://developer.mozilla.org/en-US/docs/Web/API/Performance/now
+        time = performance.now() + performance.timing.navigationStart;
+    }
+    else {
+        time = (new Date()).getTime();
+    }
+    window.customTimers[name] = time;
+}
+
+// Notify the test harness that the test has ended; otherwise the test
+// harness will time out
+function done() {
+    var elem = document.createElement('span');
+    elem.id = "GECKO_TEST_DONE";
+    document.body.appendChild(elem);
+}
diff --git a/servo/etc/ci/performance/page_load_test/example.manifest b/servo/etc/ci/performance/page_load_test/example.manifest
new file mode 100644
index 000000000000..90c1bf0f2ae2
--- /dev/null
+++ b/servo/etc/ci/performance/page_load_test/example.manifest
@@ -0,0 +1,3 @@
+http://localhost:8000/page_load_test/example/example_sync.html
+async http://localhost:8000/page_load_test/example/example_async.html
+
diff --git a/servo/etc/ci/performance/page_load_test/example/example_async.html b/servo/etc/ci/performance/page_load_test/example/example_async.html
new file mode 100644
index 000000000000..54183eca383a
--- /dev/null
+++ b/servo/etc/ci/performance/page_load_test/example/example_async.html
@@ -0,0 +1,23 @@
+<html>
+<head>
+  <title>Async Example</title>
+  <script src="../../harness/harness.js"></script>
+</head>
+<body>
+  Hello World
+  <script>
+    // Simulate asynchronous work; record a custom time marker when it
+    // finishes, then call done() so the harness knows the test has ended
+    window.setTimeout(function() {
+      mark("asyncWorkDone");
+      done();
+    }, 1000);
+  </script>
+</body>
+</html>
diff --git a/servo/etc/ci/performance/page_load_test/example/example_sync.html b/servo/etc/ci/performance/page_load_test/example/example_sync.html
new file mode 100644
index 000000000000..2d5b0c72aecb
--- /dev/null
+++ b/servo/etc/ci/performance/page_load_test/example/example_sync.html
@@ -0,0 +1,13 @@
+<html>
+<head>
+  <title>Sync Example</title>
+</head>
+<body>
+  Hello World
+</body>
+</html>
diff --git a/servo/etc/ci/performance/runner.py b/servo/etc/ci/performance/runner.py
index 414eb12aeaba..864703de312b 100644
--- a/servo/etc/ci/performance/runner.py
+++ b/servo/etc/ci/performance/runner.py
@@ -20,8 +20,15 @@ def load_manifest(filename):
def parse_manifest(text):
-    return filter(lambda x: x != "" and not x.startswith("#"),
-                  map(lambda x: x.strip(), text.splitlines()))
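+    # Each manifest entry becomes a (url, is_async) tuple; a leading
+    # "async" token marks the test case as async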
+    lines = filter(lambda x: x != "" and not x.startswith("#"),
+                   map(lambda x: x.strip(), text.splitlines()))
+    output = []
+    for line in lines:
+        parts = line.split(" ")
+        if parts[0] == "async":
+            output.append((parts[1], True))
+        else:
+            output.append((parts[0], False))
+    return output
def execute_test(url, command, timeout):
@@ -39,7 +46,12 @@ def execute_test(url, command, timeout):
return ""
-def run_servo_test(url, timeout):
+def run_servo_test(url, timeout, is_async):
+ if is_async:
+ print("Servo does not support async test!")
+ # Return a placeholder
+ return parse_log("", url)
+
ua_script_path = "{}/user-agent-js".format(os.getcwd())
command = [
"../../../target/release/servo", url,
@@ -157,7 +169,7 @@ def parse_log(log, testcase):
def filter_result_by_manifest(result_json, manifest):
    filtered = []
-    for name in manifest:
+    for name, is_async in manifest:
        match = [tc for tc in result_json if tc['testcase'] == name]
        if len(match) == 0:
            raise Exception(("Missing test result: {}. This will cause a "
@@ -177,7 +189,7 @@ def take_result_median(result_json, expected_runs):
        median_result = {}
        for k, _ in group[0].items():
-            if k == "testcase":
+            if k == "testcase" or k == "title":
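+                # testcase and title are strings; keep the first run's value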
                median_result[k] = group[0][k]
            else:
                try:
@@ -257,14 +269,14 @@ def main():
    # Assume the server is up and running
    testcases = load_manifest(args.tp5_manifest)
    results = []
-    for testcase in testcases:
+    for testcase, is_async in testcases:
        for run in range(args.runs):
            print("Running test {}/{} on {}".format(run + 1,
                                                    args.runs,
                                                    testcase))
            # results will be a mixture of timings dicts and testcase strings;
            # a testcase string indicates a failed test
-            results += run_test(testcase, args.timeout)
+            results += run_test(testcase, args.timeout, is_async)
    print("Finished")
    # TODO: Record and analyze other performance.timing properties
diff --git a/servo/etc/ci/performance/test_all.sh b/servo/etc/ci/performance/test_all.sh
index cedf1e60aa9b..86fd337b668b 100755
--- a/servo/etc/ci/performance/test_all.sh
+++ b/servo/etc/ci/performance/test_all.sh
@@ -40,7 +40,8 @@ python3 -m http.server > /dev/null 2>&1 &
# TODO: enable the full manifest when #11087 is fixed
# https://github.com/servo/servo/issues/11087
# MANIFEST="page_load_test/test.manifest"
-MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
+# MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
+MANIFEST="page_load_test/example.manifest"
# timeout test cases
PERF_FILE="output/perf-$(date +%s).json"
diff --git a/servo/etc/ci/performance/test_runner.py b/servo/etc/ci/performance/test_runner.py
index c7b0c2b7011d..27bbf598b34c 100644
--- a/servo/etc/ci/performance/test_runner.py
+++ b/servo/etc/ci/performance/test_runner.py
@@ -163,6 +163,7 @@ def test_log_parser_empty():
    expected = [{
        "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
        "navigationStart": 0,
        "unloadEventStart": -1,
        "unloadEventEnd": -1,
@@ -195,6 +196,7 @@ def test_log_parser_error():
    expected = [{
        "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
        "navigationStart": 0,
        "unloadEventStart": -1,
        "unloadEventEnd": -1,
@@ -254,6 +256,7 @@ Shutting down the Constellation after generating an output file or exit flag spe
    expected = [{
        "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
        "navigationStart": 0,
        "unloadEventStart": -1,
        "unloadEventEnd": -1,
@@ -290,9 +293,22 @@ http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
# Disabled! http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
'''
    expected = [
-        "http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html",
-        "http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html",
-        "http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html"
+        ("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html", False)
+    ]
+    assert(expected == list(runner.parse_manifest(text)))
+
+
+def test_manifest_loader_async():
+
+    text = '''
+http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html
+async http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html
+'''
+    expected = [
+        ("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", True),
    ]
    assert(expected == list(runner.parse_manifest(text)))
@@ -315,7 +331,7 @@ def test_filter_result_by_manifest():
    }]
    manifest = [
-        "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        ("http://localhost:8000/page_load_test/56.com/www.56.com/index.html", False)
    ]
    assert(expected == runner.filter_result_by_manifest(input_json, manifest))
@@ -328,8 +344,8 @@ def test_filter_result_by_manifest_error():
    }]
    manifest = [
-        "1.html",
-        "2.html"
+        ("1.html", False),
+        ("2.html", False)
    ]
    with pytest.raises(Exception) as execinfo: