servo: Merge #15067 - Added async performance test (from shinglyu:stylo-perf-async); r=Manishearth
Add a new way to test arbitrary timing from JavaScript (only for Gecko). This is for Stylo testing.

---
- [x] `./mach build -d` does not report any errors
- [x] `./mach test-tidy` does not report any errors
- [ ] These changes fix #__ (github issue number if applicable).
- [x] There are tests for these changes

Source-Repo: https://github.com/servo/servo
Source-Revision: e9933f36b771e9aad24b7abcac0ae81b49539d35

--HG--
extra : subtree_source : https%3A//hg.mozilla.org/projects/converted-servo-linear
extra : subtree_revision : 302e5278f435e9dc896d98012769fb1528bab571
parent f3c730e169
commit 7a83e02c27
@@ -39,14 +39,19 @@ Servo Page Load Time Test

# Add your own test

* You can add two types of tests: sync tests and async tests
* sync test: measures the page load time and exits automatically after the page has loaded
* async test: measures your custom time markers from JavaScript; see `page_load_test/example/example_async.html` for an example
* Add your test case (an HTML file) to the `page_load_test/` folder. For example, we can create `page_load_test/example/example.html`
* Add a manifest (or modify an existing one) named `page_load_test/example.manifest`
* Add lines like these to the manifest (a parsing sketch follows this list):

```
http://localhost:8000/page_load_test/example/example.html
# This is a comment
# Pages are served by a local server at localhost:8000
# A test case without any flag is a sync test
http://localhost:8000/page_load_test/example/example_sync.html
# An async test must start with the `async` flag
async http://localhost:8000/page_load_test/example/example.html
```

* Modify the `MANIFEST=...` line in `test_all.sh` to point to the new manifest file.
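A minimal sketch of how a manifest line maps to a `(url, is_async)` tuple, mirroring the `parse_manifest` change to `runner.py` later in this diff (the helper name `parse_manifest_line` is ours, for illustration only):

```
def parse_manifest_line(line):
    # Sync entries are bare URLs; async entries are prefixed with "async".
    line = line.strip()
    if line.split(" ")[0] == "async":
        return (line.split(" ")[1], True)
    return (line, False)

assert parse_manifest_line(
    "http://localhost:8000/page_load_test/example/example_sync.html"
) == ("http://localhost:8000/page_load_test/example/example_sync.html", False)
assert parse_manifest_line(
    "async http://localhost:8000/page_load_test/example/example.html"
) == ("http://localhost:8000/page_load_test/example/example.html", True)
```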
@@ -71,7 +71,7 @@ def generate_placeholder(testcase):
     return [timings]
 
 
-def run_gecko_test(testcase, timeout):
+def run_gecko_test(testcase, timeout, is_async):
     with create_gecko_session() as driver:
         driver.set_page_load_timeout(timeout)
         try:
@@ -97,6 +97,16 @@ def run_gecko_test(testcase, timeout):
             print("Failed to get a valid timing measurement.")
             return generate_placeholder(testcase)
 
+        if is_async:
+            # TODO: the timeout is hardcoded
+            driver.implicitly_wait(5)  # sec
+            driver.find_element_by_id("GECKO_TEST_DONE")
+            timings.update(json.loads(
+                driver.execute_script(
+                    "return JSON.stringify(window.customTimers)"
+                )
+            ))
+
         return [timings]
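The hunk above waits for the `GECKO_TEST_DONE` element via an implicit wait with a hardcoded 5-second timeout (hence the TODO). A minimal alternative sketch using Selenium's explicit `WebDriverWait`, which would make the timeout a parameter; this is not what the PR does, just one way to resolve the TODO:

```
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_for_done_marker(driver, timeout):
    # Block until harness.js's done() has appended the marker element;
    # raises selenium.common.exceptions.TimeoutException otherwise.
    WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.ID, "GECKO_TEST_DONE"))
    )
```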
servo/etc/ci/performance/harness/harness.js (new file, 22 lines)
@@ -0,0 +1,22 @@
+window.customTimers = {};
+// Create a custom timestamp with a custom name
+function mark(name) {
+    if (window.performance) {
+        // performance.now() is the time elapsed since navigationStart
+        // https://developer.mozilla.org/en-US/docs/Web/API/Performance/now
+        var time = performance.now() + performance.timing.navigationStart;
+    }
+    else {
+        var time = (new Date()).getTime();
+    }
+    window.customTimers[name] = time;
+}
+
+// Notify the test harness that the test has ended; otherwise the test
+// harness will time out
+function done() {
+    var elem = document.createElement('span');
+    elem.id = "GECKO_TEST_DONE";
+    document.body.appendChild(elem);
+}
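For intuition about `mark()`'s arithmetic: `performance.now()` is milliseconds elapsed since `navigationStart`, while `performance.timing.navigationStart` is an absolute epoch timestamp, so their sum is an absolute timestamp comparable to the other `performance.timing` fields the runner records. A worked example with hypothetical numbers:

```
# Hypothetical values: navigation_start is epoch milliseconds,
# now_since_start is what performance.now() would return.
navigation_start = 1484560000000.0   # performance.timing.navigationStart
now_since_start = 1234.5             # performance.now()

absolute_mark = navigation_start + now_since_start
assert absolute_mark == 1484560001234.5  # epoch ms, like other timing fields
```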
servo/etc/ci/performance/page_load_test/example.manifest (new file, 3 lines)
@@ -0,0 +1,3 @@
+http://localhost:8000/page_load_test/example/example_sync.html
+async http://localhost:8000/page_load_test/example/example_async.html
+
servo/etc/ci/performance/page_load_test/example/example_async.html (new file, 23 lines)
@@ -0,0 +1,23 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8" />
+  <meta name="viewport" content="width=device-width" />
+  <title></title>
+</head>
+<body>
+  Hello World
+  <!-- Remember to include the /harness/harness.js file
+       for the mark() and done() functions -->
+  <script src="/harness/harness.js" type="text/javascript" charset="utf-8"></script>
+  <script type="text/javascript" charset="utf-8">
+    // Create a timestamp before the test
+    mark("test start");
+    // Do something slow here
+    // Create another timestamp after the test
+    mark("test end");
+    // Tell the test harness you're done, otherwise it will keep waiting
+    done();
+  </script>
+</body>
+</html>
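When this page runs, `mark()` leaves entries in `window.customTimers`, which `run_gecko_test` above serializes with `JSON.stringify` and merges into the timing dict. A minimal sketch of that merge, with hypothetical epoch-millisecond values:

```
import json

# What execute_script would return for this page (hypothetical values)
custom_timers_json = '{"test start": 1484560001234.5, "test end": 1484560001980.0}'

timings = {"testcase": "http://localhost:8000/page_load_test/example/example_async.html"}
timings.update(json.loads(custom_timers_json))
# timings now holds the custom markers alongside the standard fields
```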
servo/etc/ci/performance/page_load_test/example/example_sync.html (new file, 13 lines)
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+<head>
+  <meta charset="utf-8" />
+  <meta name="viewport" content="width=device-width" />
+  <title></title>
+</head>
+<body>
+  Hello World
+  <!-- window.performance.timing is recorded.
+       The window closes after the onload event is triggered -->
+</body>
+</html>
@@ -20,8 +20,15 @@ def load_manifest(filename):
 
 
 def parse_manifest(text):
-    return filter(lambda x: x != "" and not x.startswith("#"),
-                  map(lambda x: x.strip(), text.splitlines()))
+    lines = filter(lambda x: x != "" and not x.startswith("#"),
+                   map(lambda x: x.strip(), text.splitlines()))
+    output = []
+    for line in lines:
+        if line.split(" ")[0] == "async":
+            output.append((line.split(" ")[1], True))
+        else:
+            output.append((line.split(" ")[0], False))
+    return output
 
 
 def execute_test(url, command, timeout):
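A usage sketch for the new `parse_manifest`, assuming it is imported from `runner.py`; the behavior matches the tests added later in this diff, though these URLs are hypothetical placeholders:

```
text = """
# comments and blank lines are skipped
http://localhost:8000/sync_case.html
async http://localhost:8000/async_case.html
"""

assert parse_manifest(text) == [
    ("http://localhost:8000/sync_case.html", False),
    ("http://localhost:8000/async_case.html", True),
]
```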
@@ -39,7 +46,12 @@ def execute_test(url, command, timeout):
         return ""
 
 
-def run_servo_test(url, timeout):
+def run_servo_test(url, timeout, is_async):
+    if is_async:
+        print("Servo does not support async test!")
+        # Return a placeholder
+        return parse_log("", url)
+
     ua_script_path = "{}/user-agent-js".format(os.getcwd())
     command = [
         "../../../target/release/servo", url,
@@ -157,7 +169,7 @@ def parse_log(log, testcase):
 
 def filter_result_by_manifest(result_json, manifest):
     filtered = []
-    for name in manifest:
+    for name, is_async in manifest:
         match = [tc for tc in result_json if tc['testcase'] == name]
         if len(match) == 0:
             raise Exception(("Missing test result: {}. This will cause a "
@@ -177,7 +189,7 @@ def take_result_median(result_json, expected_runs):
 
     median_result = {}
     for k, _ in group[0].items():
-        if k == "testcase":
+        if k == "testcase" or k == "title":
             median_result[k] = group[0][k]
         else:
             try:
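For context, `take_result_median` collapses repeated runs of one testcase into a single record: identifying keys (`testcase`, and now `title`) are copied through, while measurement keys are reduced to their median. A minimal self-contained sketch of that idea, not the exact implementation (whose error handling is truncated above); the sample data is hypothetical:

```
import statistics

def median_of_runs(group):
    # group: list of timing dicts from repeated runs of one testcase
    result = {}
    for key in group[0]:
        if key in ("testcase", "title"):
            result[key] = group[0][key]
        else:
            values = [run[key] for run in group
                      if isinstance(run.get(key), (int, float))]
            result[key] = statistics.median(values) if values else None
    return result

runs = [
    {"testcase": "a.html", "title": "", "loadEventEnd": 120},
    {"testcase": "a.html", "title": "", "loadEventEnd": 100},
    {"testcase": "a.html", "title": "", "loadEventEnd": 110},
]
assert median_of_runs(runs)["loadEventEnd"] == 110
```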
@@ -257,14 +269,14 @@ def main():
     # Assume the server is up and running
     testcases = load_manifest(args.tp5_manifest)
     results = []
-    for testcase in testcases:
+    for testcase, is_async in testcases:
         for run in range(args.runs):
             print("Running test {}/{} on {}".format(run + 1,
                                                     args.runs,
                                                     testcase))
             # results will be a mixture of timings dicts and testcase strings;
             # a testcase string indicates a failed test
-            results += run_test(testcase, args.timeout)
+            results += run_test(testcase, args.timeout, is_async)
     print("Finished")
     # TODO: Record and analyze other performance.timing properties
@@ -40,7 +40,8 @@ python3 -m http.server > /dev/null 2>&1 &
 # TODO: enable the full manifest when #11087 is fixed
 # https://github.com/servo/servo/issues/11087
 # MANIFEST="page_load_test/test.manifest"
-MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
+# MANIFEST="page_load_test/tp5n/20160509.manifest" # A manifest that excludes
+MANIFEST="page_load_test/example.manifest" # A manifest that excludes
 # timeout test cases
 PERF_FILE="output/perf-$(date +%s).json"
@@ -163,6 +163,7 @@ def test_log_parser_empty():
 
     expected = [{
         "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
         "navigationStart": 0,
         "unloadEventStart": -1,
         "unloadEventEnd": -1,
@@ -195,6 +196,7 @@ def test_log_parser_error():
 
     expected = [{
         "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
        "navigationStart": 0,
         "unloadEventStart": -1,
         "unloadEventEnd": -1,
@@ -254,6 +256,7 @@ Shutting down the Constellation after generating an output file or exit flag specified
 
     expected = [{
         "testcase": "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        "title": "",
         "navigationStart": 0,
         "unloadEventStart": -1,
         "unloadEventEnd": -1,
@@ -290,9 +293,22 @@ http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
 # Disabled! http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html
 '''
     expected = [
-        "http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html",
-        "http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html",
-        "http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html"
+        ("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/aljazeera.net/aljazeera.net/portal.html", False)
     ]
     assert(expected == list(runner.parse_manifest(text)))
 
 
+def test_manifest_loader_async():
+
+    text = '''
+http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html
+async http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html
+'''
+    expected = [
+        ("http://localhost/page_load_test/tp5n/163.com/www.163.com/index.html", False),
+        ("http://localhost/page_load_test/tp5n/56.com/www.56.com/index.html", True),
+    ]
+    assert(expected == list(runner.parse_manifest(text)))
@@ -315,7 +331,7 @@ def test_filter_result_by_manifest():
     }]
 
     manifest = [
-        "http://localhost:8000/page_load_test/56.com/www.56.com/index.html",
+        ("http://localhost:8000/page_load_test/56.com/www.56.com/index.html", False)
     ]
 
     assert(expected == runner.filter_result_by_manifest(input_json, manifest))
@@ -328,8 +344,8 @@ def test_filter_result_by_manifest_error():
     }]
 
     manifest = [
-        "1.html",
-        "2.html"
+        ("1.html", False),
+        ("2.html", False)
     ]
 
     with pytest.raises(Exception) as execinfo: