Bug 1482083 [wpt PR 12377] - Replace document.origin with self.origin in cookie-helper.sub.js, a=testonly

Automatic update from web-platform-testsReplace document.origin with self.origin in cookie-helper.sub.js (#12377)

In anticipation of https://github.com/whatwg/dom/issues/410
--

wpt-commits: 0e1ac363581d4bf0851a00a5563619bfef622fe4
wpt-pr: 12377
This commit is contained in:
Philip Jägenstedt 2018-08-15 01:00:36 +00:00 committed by moz-wptsync-bot
parent 90a2385cef
commit aa6354a609
23 changed files with 1983 additions and 110 deletions

View File

@ -294958,6 +294958,36 @@
{}
]
],
"resources/test/tests/functional/add_cleanup_async.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_async_bad_return.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_async_rejection.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_async_rejection_after_load.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_async_timeout.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_bad_return.html": [
[
{}
]
],
"resources/test/tests/functional/add_cleanup_count.html": [
[
{}
@ -294973,6 +295003,11 @@
{}
]
],
"resources/test/tests/functional/add_cleanup_sync_queue.html": [
[
{}
]
],
"resources/test/tests/functional/api-tests-1.html": [
[
{}
@ -295068,11 +295103,21 @@
{}
]
],
"resources/test/tests/functional/promise-with-sync.html": [
[
{}
]
],
"resources/test/tests/functional/promise.html": [
[
{}
]
],
"resources/test/tests/functional/queue.html": [
[
{}
]
],
"resources/test/tests/functional/single-page-test-fail.html": [
[
{}
@ -295093,6 +295138,16 @@
{}
]
],
"resources/test/tests/functional/task-scheduling-promise-test.html": [
[
{}
]
],
"resources/test/tests/functional/task-scheduling-test.html": [
[
{}
]
],
"resources/test/tests/functional/uncaught-exception-handle.html": [
[
{}
@ -295108,7 +295163,7 @@
{}
]
],
"resources/test/tests/functional/worker-dedicated.html": [
"resources/test/tests/functional/worker-dedicated.sub.html": [
[
{}
]
@ -295203,6 +295258,11 @@
{}
]
],
"resources/test/tests/unit/exceptional-cases.html": [
[
{}
]
],
"resources/test/tox.ini": [
[
{}
@ -453636,7 +453696,7 @@
"testharness"
],
"cookies/resources/cookie-helper.sub.js": [
"852fbb6bb64726bec094a2b3ec5cf17d289116b0",
"428cab042e4fd0ac3f67180587f4f42e42643049",
"support"
],
"cookies/resources/drop.py": [
@ -578844,7 +578904,7 @@
"support"
],
"docs/_writing-tests/testharness-api.md": [
"92a9536b69030fd8bb930f6c325a4e2bf4a2eaf7",
"bb5524532915a58e4fab3c3bb89a41bbe2a46b4a",
"support"
],
"docs/_writing-tests/testharness.md": [
@ -605716,7 +605776,7 @@
"support"
],
"lint.whitelist": [
"de9e36cf12ff4b5b3158f2f383aae70753043d2f",
"bd9bd46c52250849267c8a9fa6c8f72129f20140",
"support"
],
"longtask-timing/META.yml": [
@ -625644,7 +625704,7 @@
"support"
],
"resources/test/conftest.py": [
"801c97a1f9299805abe8f25726a2d4feb374f7d0",
"8765bf835dfc241d5cbd15e9697de465d4289bfb",
"support"
],
"resources/test/harness.html": [
@ -625659,16 +625719,44 @@
"31fe19c4bceb96a1a6904706a7369d7c10e1ed12",
"support"
],
"resources/test/tests/functional/add_cleanup_async.html": [
"9d0ade4150a25ef60bde2b09881398c226cea703",
"support"
],
"resources/test/tests/functional/add_cleanup_async_bad_return.html": [
"0b45362f1643bae215b22137eb7fc2f586993f65",
"support"
],
"resources/test/tests/functional/add_cleanup_async_rejection.html": [
"0528b4254f671b0c629fb1df7b7e81aed2b41af5",
"support"
],
"resources/test/tests/functional/add_cleanup_async_rejection_after_load.html": [
"bd8fb379c787457675a41c141c2cddf886450682",
"support"
],
"resources/test/tests/functional/add_cleanup_async_timeout.html": [
"5cb04d5a8ba7f895568dddbbf12987cd0ff3a83e",
"support"
],
"resources/test/tests/functional/add_cleanup_bad_return.html": [
"1f1c6fbf44b5d505d172e213b93261bd20c45fb6",
"support"
],
"resources/test/tests/functional/add_cleanup_count.html": [
"03f6f11e3294a7940638d59f914819f1e7293a2b",
"38fd8fd7a10e9e52e7738902dd0751dccc581e79",
"support"
],
"resources/test/tests/functional/add_cleanup_err.html": [
"d9fd1375e9bf738a9eaf98eb84e15a20aa141a79",
"9997281242a613ea14e6e36b4151129d4e058d7e",
"support"
],
"resources/test/tests/functional/add_cleanup_err_multi.html": [
"7891c12d77a28493501951e9cbb3bece2ddda39b",
"a489b96659066fb5db98c3146f3ae90b53cc53f5",
"support"
],
"resources/test/tests/functional/add_cleanup_sync_queue.html": [
"1e058f150136cf4ab9a3e5e1b4c16a28685bd94f",
"support"
],
"resources/test/tests/functional/api-tests-1.html": [
@ -625728,7 +625816,7 @@
"support"
],
"resources/test/tests/functional/iframe-consolidate-errors.html": [
"9ba6e179faffc44f6caea124654a9e421289c3be",
"e382c6e6fd96bc40ea057a941f13816c947fa3eb",
"support"
],
"resources/test/tests/functional/iframe-consolidate-tests.html": [
@ -625747,10 +625835,18 @@
"d4c62794c4f77abf460cd484fd548a59e1ed16e3",
"support"
],
"resources/test/tests/functional/promise-with-sync.html": [
"234f5476e9cdaf8c388cdaaa2e6464bc9120fe3d",
"support"
],
"resources/test/tests/functional/promise.html": [
"bdf6dc3ec2af07a9799243cbc7b15da939961363",
"support"
],
"resources/test/tests/functional/queue.html": [
"4ea32a2bc8ee64b5841596f240291ec7fa514274",
"support"
],
"resources/test/tests/functional/single-page-test-fail.html": [
"5826a2ef15c00d817197333de1f444cf1ac51e8b",
"support"
@ -625767,6 +625863,14 @@
"9d5f776d541454cdcff985bb2ad050036d358b81",
"support"
],
"resources/test/tests/functional/task-scheduling-promise-test.html": [
"fb4cc2dd27d52573c4113aa1a6f8d833ce80c9be",
"support"
],
"resources/test/tests/functional/task-scheduling-test.html": [
"134bdb2ea8d629afaac79b0fe84b3ae570445b17",
"support"
],
"resources/test/tests/functional/uncaught-exception-handle.html": [
"4c960186e0d29885aebeb379181ed181ccc26d1d",
"support"
@ -625779,8 +625883,8 @@
"760151832e81f8ef61d510b252d0cd1d7d843495",
"support"
],
"resources/test/tests/functional/worker-dedicated.html": [
"a790a1520ceed96f254b1f5415d0415f7bf3a456",
"resources/test/tests/functional/worker-dedicated.sub.html": [
"586326d876119da03413a6473b042885abee7741",
"support"
],
"resources/test/tests/functional/worker-error.js": [
@ -625855,6 +625959,10 @@
"7eef4a8fa5b50547bce915170a9b3e1e0312adf4",
"support"
],
"resources/test/tests/unit/exceptional-cases.html": [
"df9e1239a2ec48dd8b489fb7001a5295e334f963",
"support"
],
"resources/test/tox.ini": [
"d3a30f870a1572d4423ae99f64c67d63afa345da",
"support"
@ -625888,7 +625996,7 @@
"support"
],
"resources/testharness.js": [
"0ea7a2a7f48424708ef661e2dbcecdb8b916c81a",
"f0c24635017dad6275c99dc149ab1739470eeb36",
"support"
],
"resources/testharness.js.headers": [

View File

@ -48,19 +48,19 @@ function assert_cookie(origin, obj, name, value, present) {
}
// Remove the cookie named |name| from |origin|, then set it on |origin| anew.
// If |origin| matches `document.origin`, also assert (via `document.cookie`) that
// If |origin| matches `self.origin`, also assert (via `document.cookie`) that
// the cookie was correctly removed and reset.
function create_cookie(origin, name, value, extras) {
alert("Create_cookie: " + origin + "/cookies/resources/drop.py?name=" + name);
return credFetch(origin + "/cookies/resources/drop.py?name=" + name)
.then(_ => {
if (origin == document.origin)
if (origin == self.origin)
assert_dom_cookie(name, value, false);
})
.then(_ => {
return credFetch(origin + "/cookies/resources/set.py?" + name + "=" + value + ";path=/;" + extras)
.then(_ => {
if (origin == document.origin)
if (origin == self.origin)
assert_dom_cookie(name, value, true);
});
});
@ -96,7 +96,7 @@ function set_prefixed_cookie_via_http_test(options) {
var name = options.prefix + "prefixtestcookie";
if (!options.origin) {
options.origin = document.origin;
options.origin = self.origin;
erase_cookie_from_js(name);
return postDelete;
} else {
@ -116,12 +116,12 @@ window.SameSiteStatus = {
STRICT: "strict"
};
// Reset SameSite test cookies on |origin|. If |origin| matches `document.origin`, assert
// Reset SameSite test cookies on |origin|. If |origin| matches `self.origin`, assert
// (via `document.cookie`) that they were properly removed and reset.
function resetSameSiteCookies(origin, value) {
return credFetch(origin + "/cookies/resources/dropSameSite.py")
.then(_ => {
if (origin == document.origin) {
if (origin == self.origin) {
assert_dom_cookie("samesite_strict", value, false);
assert_dom_cookie("samesite_lax", value, false);
assert_dom_cookie("samesite_none", value, false);
@ -130,7 +130,7 @@ function resetSameSiteCookies(origin, value) {
.then(_ => {
return credFetch(origin + "/cookies/resources/setSameSite.py?" + value)
.then(_ => {
if (origin == document.origin) {
if (origin == self.origin) {
assert_dom_cookie("samesite_strict", value, true);
assert_dom_cookie("samesite_lax", value, true);
assert_dom_cookie("samesite_none", value, true);
@ -164,12 +164,12 @@ window.SecureStatus = {
BOTH_COOKIES: "2",
};
//Reset SameSite test cookies on |origin|. If |origin| matches `document.origin`, assert
//Reset SameSite test cookies on |origin|. If |origin| matches `self.origin`, assert
//(via `document.cookie`) that they were properly removed and reset.
function resetSecureCookies(origin, value) {
return credFetch(origin + "/cookies/resources/dropSecure.py")
.then(_ => {
if (origin == document.origin) {
if (origin == self.origin) {
assert_dom_cookie("alone_secure", value, false);
assert_dom_cookie("alone_insecure", value, false);
}

View File

@ -318,6 +318,16 @@ the test result is known. For example:
}, "Calling document.getElementById with a null argument.");
```
If the test was created using the `promise_test` API, then cleanup functions
may optionally return a "thenable" value (i.e. an object which defines a `then`
method). `testharness.js` will assume that such values conform to [the
ECMAScript standard for
Promises](https://tc39.github.io/ecma262/#sec-promise-objects) and delay the
completion of the test until all "thenables" provided in this way have settled.
All callbacks will be invoked synchronously; tests that require more complex
cleanup behavior should manage execution order explicitly. If any of the
eventual values are rejected, the test runner will report an error.
## Timeouts in Tests ##
In general the use of timeouts in tests is discouraged because this is

View File

@ -293,8 +293,12 @@ SET TIMEOUT: html/dom/documents/dom-tree-accessors/Document.currentScript.html
SET TIMEOUT: html/webappapis/timers/*
SET TIMEOUT: resources/chromium/*
SET TIMEOUT: resources/test/tests/functional/add_cleanup.html
SET TIMEOUT: resources/test/tests/functional/add_cleanup_async.html
SET TIMEOUT: resources/test/tests/functional/add_cleanup_async_rejection.html
SET TIMEOUT: resources/test/tests/functional/add_cleanup_async_rejection_after_load.html
SET TIMEOUT: resources/test/tests/functional/api-tests-1.html
SET TIMEOUT: resources/test/tests/functional/worker.js
SET TIMEOUT: resources/test/tests/unit/exceptional-cases.html
SET TIMEOUT: resources/testharness.js
# setTimeout use in reftests

View File

@ -1,6 +1,8 @@
import io
import json
import os
import ssl
import urllib2
import html5lib
import pytest
@ -8,7 +10,6 @@ from selenium import webdriver
from wptserver import WPTServer
ENC = 'utf8'
HERE = os.path.dirname(os.path.abspath(__file__))
WPT_ROOT = os.path.normpath(os.path.join(HERE, '..', '..'))
HARNESS = os.path.join(HERE, 'harness.html')
@ -30,6 +31,16 @@ def pytest_configure(config):
config.driver = webdriver.Firefox(firefox_binary=config.getoption("--binary"))
config.server = WPTServer(WPT_ROOT)
config.server.start()
# Although the name of the `_create_unverified_context` method suggests
# that it is not intended for external consumption, the standard library's
# documentation explicitly endorses its use:
#
# > To revert to the previous, unverified, behavior
# > ssl._create_unverified_context() can be passed to the context
# > parameter.
#
# https://docs.python.org/2/library/httplib.html#httplib.HTTPSConnection
config.ssl_context = ssl._create_unverified_context()
config.add_cleanup(config.server.stop)
config.add_cleanup(config.driver.quit)
@ -45,16 +56,22 @@ def resolve_uri(context, uri):
class HTMLItem(pytest.Item, pytest.Collector):
def __init__(self, filename, test_type, parent):
self.filename = filename
self.url = parent.session.config.server.url(filename)
self.type = test_type
self.variants = []
# Some tests are reliant on the WPT servers substitution functionality,
# so tests must be retrieved from the server rather than read from the
# file system directly.
handle = urllib2.urlopen(self.url,
context=parent.session.config.ssl_context)
try:
markup = handle.read()
finally:
handle.close()
if test_type not in TEST_TYPES:
raise ValueError('Unrecognized test type: "%s"' % test_type)
with io.open(filename, encoding=ENC) as f:
markup = f.read()
parsed = html5lib.parse(markup, namespaceHTMLElements=False)
name = None
includes_variants_script = False
@ -94,7 +111,7 @@ class HTMLItem(pytest.Item, pytest.Collector):
def reportinfo(self):
return self.fspath, None, self.filename
return self.fspath, None, self.url
def repr_failure(self, excinfo):
return pytest.Collector.repr_failure(self, excinfo)
@ -113,7 +130,9 @@ class HTMLItem(pytest.Item, pytest.Collector):
driver.get(server.url(HARNESS))
actual = driver.execute_async_script('runTest("%s", "foo", arguments[0])' % server.url(str(self.filename)))
actual = driver.execute_async_script(
'runTest("%s", "foo", arguments[0])' % self.url
)
summarized = self._summarize(actual)
@ -132,7 +151,7 @@ class HTMLItem(pytest.Item, pytest.Collector):
driver.get(server.url(HARNESS))
test_url = server.url(str(self.filename) + variant)
test_url = self.url + variant
actual = driver.execute_async_script('runTest("%s", "foo", arguments[0])' % test_url)
# Test object ordering is not guaranteed. This weak assertion verifies

View File

@ -0,0 +1,87 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with Promise-returning functions</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
var completeCount = 0;
var counts = {
afterTick: null,
afterFirst: null
};
add_result_callback(function(result_t) {
completeCount += 1;
});
promise_test(function(t) {
t.add_cleanup(function() {
return new Promise(function(resolve) {
setTimeout(function() {
counts.afterTick = completeCount;
resolve();
}, 0);
});
});
t.add_cleanup(function() {
return new Promise(function(resolve) {
setTimeout(function() {
counts.afterFirst = completeCount;
resolve();
}, 0);
});
});
return Promise.resolve();
}, 'promise_test with asynchronous cleanup');
promise_test(function() {
assert_equals(
counts.afterTick,
0,
"test is not asynchronously considered 'complete'"
);
assert_equals(
counts.afterFirst,
0,
"test is not considered 'complete' following fulfillment of first promise"
);
assert_equals(completeCount, 1);
return Promise.resolve();
}, "synchronously-defined promise_test");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "OK",
"message": null
},
"summarized_tests": [
{
"status_string": "PASS",
"name": "promise_test with asynchronous cleanup",
"message": null,
"properties": {}
},
{
"status_string": "PASS",
"name": "synchronously-defined promise_test",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -0,0 +1,52 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with non-thenable-returning function</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
promise_test(function(t) {
t.add_cleanup(function() {});
t.add_cleanup(function() {
return { then: 9 };
});
t.add_cleanup(function() { return Promise.resolve(); });
return Promise.resolve();
}, "promise_test that returns a non-thenable object in one \"cleanup\" callback");
promise_test(function() {}, "The test runner is in an unpredictable state ('NOT RUN')");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "ERROR",
"message": "Test named 'promise_test that returns a non-thenable object in one \"cleanup\" callback' specified 3 'cleanup' functions, and 1 returned a non-thenable value."
},
"summarized_tests": [
{
"status_string": "NOTRUN",
"name": "The test runner is in an unpredictable state ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "PASS",
"name": "promise_test that returns a non-thenable object in one \"cleanup\" callback",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -0,0 +1,96 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with Promise-returning functions (rejection handling)</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
var resolve, reject;
var completeCount = 0;
add_result_callback(function(result_t) {
completeCount += 1;
});
promise_test(function(t) {
t.add_cleanup(function() {
return new Promise(function(_, _reject) { reject = _reject; });
});
t.add_cleanup(function() {
return new Promise(function(_resolve) { resolve = _resolve; });
});
// The following cleanup function defines empty tests so that the reported
// data demonstrates the intended run-time behavior without relying on the
// test harness's handling of errors during test cleanup (which is tested
// elsewhere).
t.add_cleanup(function() {
if (completeCount === 0) {
promise_test(
function() {},
"test is not asynchronously considered 'complete' ('NOT RUN')"
);
}
reject();
setTimeout(function() {
if (completeCount === 0) {
promise_test(
function() {},
"test is not considered 'complete' following rejection of first " +
"promise ('NOT RUN')"
);
}
resolve();
}, 0);
});
return Promise.resolve();
}, "promise_test with asynchronous cleanup including rejection");
promise_test(function() {}, "synchronously-defined test ('NOT RUN')");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "ERROR",
"message": "Test named 'promise_test with asynchronous cleanup including rejection' specified 3 'cleanup' functions, and 1 failed."
},
"summarized_tests": [
{
"status_string": "PASS",
"name": "promise_test with asynchronous cleanup including rejection",
"message": null,
"properties": {}
},
{
"status_string": "NOTRUN",
"name": "synchronously-defined test ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "NOTRUN",
"name": "test is not asynchronously considered 'complete' ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "NOTRUN",
"name": "test is not considered 'complete' following rejection of first promise ('NOT RUN')",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -0,0 +1,54 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with Promise-returning functions (rejection handling following "load" event)</title>
<script src="../../variants.js"></script>
</head>
<body>
<h1>Promise Tests</h1>
<p>This test demonstrates the use of <tt>promise_test</tt>. Assumes ECMAScript 6
Promise support. Some failures are expected.</p>
<div id="log"></div>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
<script>
promise_test(function(t) {
t.add_cleanup(function() {
return Promise.reject(new Error("foo"));
});
return new Promise((resolve) => {
document.addEventListener("DOMContentLoaded", function() {
setTimeout(resolve, 0)
});
});
}, "Test with failing cleanup that completes after DOMContentLoaded event");
promise_test(function(t) {
return Promise.resolve();
}, "Test that should not be run due to invalid harness state ('NOT RUN')");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "ERROR",
"message": "Test named 'Test with failing cleanup that completes after DOMContentLoaded event' specified 1 'cleanup' function, and 1 failed."
},
"summarized_tests": [
{
"status_string": "NOTRUN",
"name": "Test that should not be run due to invalid harness state ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "PASS",
"name": "Test with failing cleanup that completes after DOMContentLoaded event",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>

View File

@ -0,0 +1,59 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with Promise-returning functions (timeout handling)</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
promise_test(function(t) {
t.add_cleanup(function() {
return Promise.resolve();
});
t.add_cleanup(function() {
return new Promise(function() {});
});
t.add_cleanup(function() {});
t.add_cleanup(function() {
return new Promise(function() {});
});
return Promise.resolve();
}, "promise_test with asynchronous cleanup");
promise_test(function() {}, "promise_test following timed out cleanup ('NOT RUN')");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "ERROR",
"message": "Timeout while running cleanup for test named \"promise_test with asynchronous cleanup\"."
},
"summarized_tests": [
{
"status_string": "NOTRUN",
"name": "promise_test following timed out cleanup ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "PASS",
"name": "promise_test with asynchronous cleanup",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -0,0 +1,64 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="">
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup with value-returning function</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
test(function(t) {
t.add_cleanup(function() {});
t.add_cleanup(function() { return null; });
t.add_cleanup(function() {
test(
function() {},
"The test runner is in an unpredictable state #1 ('NOT RUN')"
);
throw new Error();
});
t.add_cleanup(function() { return 4; });
t.add_cleanup(function() { return { then: function() {} }; });
t.add_cleanup(function() {});
}, "Test that returns a value in three \"cleanup\" functions");
test(function() {}, "The test runner is in an unpredictable state #2 ('NOT RUN')");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "ERROR",
"message": "Test named 'Test that returns a value in three \"cleanup\" functions' specified 6 'cleanup' functions, and 1 failed, and 3 returned a non-undefined value."
},
"summarized_tests": [
{
"status_string": "PASS",
"name": "Test that returns a value in three \"cleanup\" functions",
"properties": {},
"message": null
},
{
"status_string": "NOTRUN",
"name": "The test runner is in an unpredictable state #1 ('NOT RUN')",
"message": null,
"properties": {}
},
{
"status_string": "NOTRUN",
"name": "The test runner is in an unpredictable state #2 ('NOT RUN')",
"message": null,
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -4,8 +4,8 @@
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup reported count</title>
<script src="../../variants.js"></script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>

View File

@ -5,8 +5,8 @@
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup: error</title>
<script src="../../variants.js"></script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>

View File

@ -5,8 +5,8 @@
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup: multiple functions with one in error</title>
<script src="../../variants.js"></script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>

View File

@ -0,0 +1,57 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test#add_cleanup: queuing tests</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
var firstCleaned = false;
promise_test(function(t) {
promise_test(function() {
assert_true(
firstCleaned, "should not execute until first test is complete"
);
return Promise.resolve();
}, "test defined when no tests are queued, but one test is executing");
t.add_cleanup(function() {
firstCleaned = true;
});
return Promise.resolve();
}, "Test with a 'cleanup' function");
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"message": null,
"status_string": "OK"
},
"summarized_tests": [
{
"message": null,
"name": "Test with a 'cleanup' function",
"status_string": "PASS",
"properties": {}
},
{
"message": null,
"name": "test defined when no tests are queued, but one test is executing",
"status_string": "PASS",
"properties": {}
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -38,6 +38,12 @@ child context.</p>
"name": "Test executing in parent context",
"properties": {},
"message": null
},
{
"status_string": "NOTRUN",
"name": "This should show a harness status of 'Error' and a test status of 'Not Run'",
"properties": {},
"message": null
}
],
"type": "complete"

View File

@ -0,0 +1,81 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Promise Tests and Synchronous Tests</title>
<script src="../../variants.js"></script>
</head>
<body>
<h1>Promise Tests</h1>
<p>This test demonstrates the use of <tt>promise_test</tt> alongside synchronous tests.</p>
<div id="log"></div>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
<script>
"use strict";
var sequence = [];
test(function(t) {
assert_array_equals(sequence, []);
sequence.push(1);
}, "first synchronous test");
promise_test(function() {
assert_array_equals(sequence, [1, 2]);
return Promise.resolve()
.then(function() {
assert_array_equals(sequence, [1, 2]);
sequence.push(3);
});
}, "first promise_test");;
test(function(t) {
assert_array_equals(sequence, [1]);
sequence.push(2);
}, "second synchronous test");
promise_test(function() {
assert_array_equals(sequence, [1, 2, 3]);
return Promise.resolve()
.then(function() {
assert_array_equals(sequence, [1, 2, 3]);
});
}, "second promise_test");;
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"message": null,
"status_string": "OK"
},
"summarized_tests": [
{
"message": null,
"properties": {},
"name": "first promise_test",
"status_string": "PASS"
},
{
"message": null,
"properties": {},
"name": "first synchronous test",
"status_string": "PASS"
},
{
"message": null,
"properties": {},
"name": "second promise_test",
"status_string": "PASS"
},
{
"message": null,
"properties": {},
"name": "second synchronous test",
"status_string": "PASS"
}
],
"type": "complete"
}
</script>

View File

@ -0,0 +1,132 @@
<!DOCTYPE HTML>
<html>
<head>
<meta name="variant" content="?keep-promise">
<title>Test queuing synchronous tests</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<div id="log"></div>
<script>
"use strict";
var inInitialTurn = true;
test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
}, "First synchronous test");
test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
}, "Second synchronous test");
async_test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
t.done();
}, "First async_test (run in parallel)");
async_test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
t.done();
}, "Second async_test (run in parallel)");
test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
}, "Third synchronous test");
promise_test(function(t) {
assert_false(
inInitialTurn, "should not execute in the initial turn of the event loop"
);
return Promise.resolve();
}, "promise_test");
async_test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
t.done();
}, "Third async_test (run in parallel)");
test(function(t) {
assert_true(
inInitialTurn, "should execute in the initial turn of the event loop"
);
}, "Fourth synchronous test");
inInitialTurn = false;
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"status_string": "OK",
"message": null
},
"summarized_tests": [
{
"properties": {},
"name": "First async_test (run in parallel)",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "First synchronous test",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "Fourth synchronous test",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "Second async_test (run in parallel)",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "Second synchronous test",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "Third async_test (run in parallel)",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "Third synchronous test",
"status_string": "PASS",
"message": null
},
{
"properties": {},
"name": "promise_test",
"status_string": "PASS",
"message": null
}
],
"type": "complete"
}
</script>
</body>
</html>

View File

@ -0,0 +1,243 @@
<!doctype html>
<meta name="variant" content="?keep-promise">
<title>testharness.js - task scheduling</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
<script>
var sameTask = null;
var sameMicrotask = null;
var expectedError = new Error('This error is expected');

// Each "probe" sub-test sets `sameMicrotask` (or `sameTask`) to `true` and
// schedules a microtask (or a task, via `step_timeout`) that resets the flag
// to `false`. The immediately-following sub-test inspects the flag to learn
// whether the harness started it within the same microtask/task or a later
// one.
//
// NOTE(review): the probe tests previously assigned to a misspelled implicit
// global (`sameMirotask`); corrected to `sameMicrotask` so the probes
// actually set the flag they intend to. The pass/fail outcome of each
// sub-test is unchanged because the resetting microtask still runs before
// the following sub-test executes.

promise_test(function() {
  return Promise.resolve()
    .then(function() {
      sameMicrotask = true;
      Promise.resolve().then(() => sameMicrotask = false);
    });
}, 'promise test without cleanup #1');
promise_test(function() {
  assert_false(sameMicrotask);
  return Promise.resolve();
}, 'sub-test with 0 cleanup functions executes in distinct microtask from a passing sub-test');
promise_test(function() {
  return Promise.resolve()
    .then(function() {
      sameMicrotask = true;
      Promise.resolve().then(() => sameMicrotask = false);
      // Rejecting after the flag is set exercises the failing-test path.
      throw expectedError;
    });
}, 'failing promise test without cleanup #1');
promise_test(function() {
  assert_false(sameMicrotask);
  return Promise.resolve();
}, 'sub-test with 0 cleanup functions executes in distinct microtask from a failing sub-test');
promise_test(function(t) {
  t.add_cleanup(function() {});
  return Promise.resolve()
    .then(function() {
      sameMicrotask = true;
      Promise.resolve().then(() => sameMicrotask = false);
    });
}, 'promise test with cleanup #1');
promise_test(function() {
  assert_false(sameMicrotask);
  return Promise.resolve();
}, 'sub-test with some cleanup functions executes in distinct microtask from a passing sub-test');
promise_test(function(t) {
  t.add_cleanup(function() {});
  return Promise.resolve()
    .then(function() {
      sameMicrotask = true;
      Promise.resolve().then(() => sameMicrotask = false);
      throw expectedError;
    });
}, 'failing promise test with cleanup #1');
promise_test(function() {
  assert_false(sameMicrotask);
  return Promise.resolve();
}, 'sub-test with some cleanup functions executes in distinct microtask from a failing sub-test');
promise_test(function(t) {
  return Promise.resolve()
    .then(function() {
      sameTask = true;
      t.step_timeout(() => sameTask = false, 0);
    });
}, 'promise test without cleanup #2');
promise_test(function() {
  assert_true(sameTask);
  return Promise.resolve();
}, 'sub-test with 0 cleanup functions executes in the same task as a passing sub-test');
promise_test(function(t) {
  return Promise.resolve()
    .then(function() {
      sameTask = true;
      t.step_timeout(() => sameTask = false, 0);
      throw expectedError;
    });
}, 'failing promise test without cleanup #2');
promise_test(function() {
  assert_true(sameTask);
  return Promise.resolve();
}, 'sub-test with 0 cleanup functions executes in the same task as a failing sub-test');
promise_test(function(t) {
  t.add_cleanup(function() {});
  return Promise.resolve()
    .then(function() {
      sameTask = true;
      t.step_timeout(() => sameTask = false, 0);
    });
}, 'promise test with cleanup #2');
promise_test(function() {
  assert_true(sameTask);
  return Promise.resolve();
}, 'sub-test with some cleanup functions executes in the same task as a passing sub-test');
promise_test(function(t) {
  t.add_cleanup(function() {});
  return Promise.resolve()
    .then(function() {
      sameTask = true;
      t.step_timeout(() => sameTask = false, 0);
      throw expectedError;
    });
}, 'failing promise test with cleanup #2');
promise_test(function() {
  assert_true(sameTask);
  return Promise.resolve();
}, 'sub-test with some cleanup functions executes in the same task as a failing sub-test');
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"message": null,
"status_string": "OK"
},
"summarized_tests": [
{
"message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
"name": "failing promise test with cleanup #1",
"properties": {},
"status_string": "FAIL"
},
{
"message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
"name": "failing promise test with cleanup #2",
"properties": {},
"status_string": "FAIL"
},
{
"message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
"name": "failing promise test without cleanup #1",
"properties": {},
"status_string": "FAIL"
},
{
"message": "promise_test: Unhandled rejection with value: object \"Error: This error is expected\"",
"name": "failing promise test without cleanup #2",
"properties": {},
"status_string": "FAIL"
},
{
"message": null,
"name": "promise test with cleanup #1",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "promise test with cleanup #2",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "promise test without cleanup #1",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "promise test without cleanup #2",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in distinct microtask from a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in distinct microtask from a passing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in the same task as a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in the same task as a passing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in distinct microtask from a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in distinct microtask from a passing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in the same task as a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in the same task as a passing sub-test",
"properties": {},
"status_string": "PASS"
}
],
"type": "complete"
}
</script>

View File

@ -0,0 +1,144 @@
<!doctype html>
<meta name="variant" content="">
<meta name="variant" content="?keep-promise">
<title>testharness.js - task scheduling</title>
<script src="../../variants.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
<script>
var sameMicrotask = null;
var expectedError = new Error('This error is expected');

// Derived from `immediate`
// https://github.com/calvinmetcalf/immediate/blob/c353bd2106648cee1d525bfda22cfc4456e69c0e/lib/mutation.js
//
// Schedules `callback` to run as a microtask by observing a characterData
// mutation on a detached text node.
// (Review fix: removed a stray semicolon that followed the function
// declaration; no behavior change.)
function microTask(callback) {
  var observer = new MutationObserver(callback);
  var element = document.createTextNode('');
  observer.observe(element, {
    characterData: true
  });
  // Any change to `data` (the boolean is coerced to the string "true")
  // fires the observer, queuing `callback` as a microtask.
  element.data = true;
}

// Each "probe" test sets `sameMicrotask` to `true` and queues a microtask
// that resets it to `false`; the following sub-test asserts the flag is
// still `true`, i.e. that the harness ran it in the same microtask.

async_test(function(t) {
  var microtask_ran = false;
  t.step_timeout(t.step_func(function() {
    assert_true(microtask_ran, 'function registered as a microtask was executed before task');
    t.done();
  }), 0);
  microTask(function() {
    microtask_ran = true;
  });
}, 'precondition: microtask creation logic functions as expected');
test(function() {
  sameMicrotask = true;
  microTask(function() { sameMicrotask = false; });
}, 'synchronous test without cleanup');
test(function() {
  assert_true(sameMicrotask);
}, 'sub-test with 0 cleanup functions executes in the same microtask as a passing sub-test');
test(function() {
  sameMicrotask = true;
  microTask(function() { sameMicrotask = false; });
  throw expectedError;
}, 'failing synchronous test without cleanup');
test(function() {
  assert_true(sameMicrotask);
}, 'sub-test with 0 cleanup functions executes in the same microtask as a failing sub-test');
test(function(t) {
  t.add_cleanup(function() {});
  sameMicrotask = true;
  microTask(function() { sameMicrotask = false; });
}, 'synchronous test with cleanup');
test(function() {
  assert_true(sameMicrotask);
}, 'sub-test with some cleanup functions executes in the same microtask as a passing sub-test');
test(function(t) {
  t.add_cleanup(function() {});
  sameMicrotask = true;
  microTask(function() { sameMicrotask = false; });
  throw expectedError;
}, 'failing synchronous test with cleanup');
test(function() {
  assert_true(sameMicrotask);
}, 'sub-test with some cleanup functions executes in the same microtask as a failing sub-test');
</script>
<script type="text/json" id="expected">
{
"summarized_status": {
"message": null,
"status_string": "OK"
},
"summarized_tests": [
{
"message": "This error is expected",
"name": "failing synchronous test with cleanup",
"properties": {},
"status_string": "FAIL"
},
{
"message": "This error is expected",
"name": "failing synchronous test without cleanup",
"properties": {},
"status_string": "FAIL"
},
{
"message": null,
"name": "precondition: microtask creation logic functions as expected",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in the same microtask as a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with 0 cleanup functions executes in the same microtask as a passing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in the same microtask as a failing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "sub-test with some cleanup functions executes in the same microtask as a passing sub-test",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "synchronous test with cleanup",
"properties": {},
"status_string": "PASS"
},
{
"message": null,
"name": "synchronous test without cleanup",
"properties": {},
"status_string": "PASS"
}
],
"type": "complete"
}
</script>

View File

@ -5,8 +5,8 @@
<meta name="variant" content="?keep-promise">
<title>Dedicated Worker Tests</title>
<script src="../../variants.js"></script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="../../../testharness.js"></script>
<script src="../../../testharnessreport.js"></script>
</head>
<body>
<h1>Dedicated Web Worker Tests</h1>
@ -33,7 +33,7 @@ test(function(t) {
{
"summarized_status": {
"status_string": "ERROR",
"message": "Error: This failure is expected."
"message": "Error in remote https://{{domains[]}}:{{ports[https][0]}}/resources/test/tests/functional/worker-error.js: Error: This failure is expected."
},
"summarized_tests": [
{

View File

@ -0,0 +1,404 @@
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<meta name="timeout" content="long">
<script src="/resources/testharness.js"></script>
<title>Exceptional cases</title>
</head>
<body>
<script>
// Build a throwaway iframe running its own copy of testharness.js, inject
// each member of `bodies` as an immediately-invoked inline script, and
// resolve with a summary of the child harness's results once a completion
// message arrives.
//
// @param {...Function} bodies - functions serialized into the child document
//     and invoked there, one per <script> element.
// @returns {Promise<{harness: string, tests: Object<string, string>}>}
//     status *names* (e.g. "OK", "PASS") looked up via getEnumProp.
function makeTest(...bodies) {
  // Split "</script>" so this inline script is not terminated prematurely
  // by the HTML parser.
  const closeScript = '<' + '/script>';
  // The random query string forces a fresh (uncached) load of
  // testharness.js in the child document.
  let src = `
<!DOCTYPE HTML>
<html>
<head>
<title>Document title</title>
<script src="/resources/testharness.js?${Math.random()}">${closeScript}
</head>
<body>
<div id="log"></div>`;
  bodies.forEach((body) => {
    // Stringify each function and invoke it immediately in the child realm.
    src += '<script>(' + body + ')();' + closeScript;
  });
  const iframe = document.createElement('iframe');
  document.body.appendChild(iframe);
  iframe.contentDocument.write(src);
  return new Promise((resolve) => {
    window.addEventListener('message', function onMessage(e) {
      // Ignore messages from other frames.
      if (e.source !== iframe.contentWindow) {
        return;
      }
      // Only react to the completion message (presumably posted by the
      // child harness when the test run finishes — type "complete").
      if (!e.data || e.data.type !=='complete') {
        return;
      }
      window.removeEventListener('message', onMessage);
      resolve(e.data);
    });
    iframe.contentDocument.close();
  }).then(({ tests, status }) => {
    // Reduce the raw result objects to name -> status-name pairs.
    const summary = {
      harness: getEnumProp(status, status.status),
      tests: {}
    };
    tests.forEach((test) => {
      summary.tests[test.name] = getEnumProp(test, test.status);
    });
    return summary;
  });
}
/**
 * Reverse-lookup the name of an enum-style constant: return the first
 * all-uppercase property of `object` whose value strictly equals `value`.
 * Uses `for...in` deliberately, so inherited enumerable properties are
 * also considered. Returns `undefined` when no such property exists.
 *
 * @param {Object} object - object carrying uppercase constant properties
 * @param {*} value - the constant value to look up
 * @returns {string|undefined} the matching property name, if any
 */
function getEnumProp(object, value) {
  const upperCaseOnly = /^[A-Z]+$/;
  for (const name in object) {
    if (upperCaseOnly.test(name) && object[name] === value) {
      return name;
    }
  }
}
promise_test(() => {
return makeTest(
() => { throw new Error('this error is expected'); }
).then(({harness, tests}) => {
assert_equals(harness, 'OK');
assert_equals(tests['Document title'], 'FAIL');
});
}, 'uncaught exception during single-page test');
promise_test(() => {
return makeTest(
() => {
async_test((t) => {
setTimeout(() => {
setTimeout(() => t.done(), 0);
async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
throw new Error('this error is expected');
}, 0);
}, 'during');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'uncaught exception during async_test');
promise_test(() => {
return makeTest(
() => {
promise_test(() => {
return new Promise((resolve) => {
setTimeout(() => {
resolve();
promise_test(() => Promise.resolve(), 'after');
throw new Error('this error is expected');
}, 0);
});
}, 'during');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'uncaught exception during promise_test');
promise_test(() => {
window.asyncTestCleanupCount = 0;
return makeTest(
() => {
async_test((t) => {
t.add_cleanup(() => window.parent.asyncTestCleanupCount += 1);
setTimeout(() => {
throw new Error('this error is expected');
});
}, 'test');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.test, 'TIMEOUT');
assert_equals(window.asyncTestCleanupCount, 1);
});
}, 'uncaught exception during async_test which times out');
promise_test(() => {
window.promiseTestCleanupCount = 0;
return makeTest(
() => {
promise_test((t) => {
t.add_cleanup(() => window.parent.promiseTestCleanupCount += 1);
setTimeout(() => {
throw new Error('this error is expected');
});
return new Promise(() => {});
}, 'test');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.test, 'TIMEOUT');
assert_equals(window.promiseTestCleanupCount, 1);
});
}, 'uncaught exception during promise_test which times out');
promise_test(() => {
return makeTest(
() => { test(() => {}, 'before'); },
() => { throw new Error('this error is expected'); },
() => { test(() => {}, 'after'); }
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'uncaught exception between tests');
promise_test(() => {
return makeTest(
() => { async_test((t) => setTimeout(t.done.bind(t), 0), 'before'); },
() => { async_test('pending'); },
() => { throw new Error('this error is expected'); },
() => { async_test((t) => setTimeout(t.done.bind(t), 0), 'after'); }
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
assert_equals(tests.pending, 'NOTRUN');
assert_equals(tests.after, 'PASS');
});
}, 'uncaught exception between async_tests');
promise_test(() => {
return makeTest(
() => { promise_test(() => Promise.resolve(), 'before'); },
() => { throw new Error('this error is expected'); },
() => { promise_test(() => Promise.resolve(), 'after'); }
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'uncaught exception between promise_tests');
// This feature of testharness.js is only observable in browsers which
// implement the `unhandledrejection` event.
if ('onunhandledrejection' in window) {
promise_test(() => {
return makeTest(
() => { Promise.reject(new Error('this error is expected')); }
).then(({harness, tests}) => {
assert_equals(harness, 'OK');
assert_equals(tests['Document title'], 'FAIL');
});
}, 'unhandled rejection during single-page test');
promise_test(() => {
return makeTest(
() => {
async_test('pending');
async_test((t) => {
Promise.reject(new Error('this error is expected'));
window.addEventListener('unhandledrejection', () => {
setTimeout(() => t.done(), 0);
async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
t.done();
});
}, 'during');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.pending, 'NOTRUN');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'unhandled rejection during async_test');
promise_test(() => {
return makeTest(
() => {
promise_test(() => {
return new Promise((resolve) => {
Promise.reject(new Error('this error is expected'));
window.addEventListener('unhandledrejection', () => {
resolve();
promise_test(() => Promise.resolve(), 'after');
throw new Error('this error is expected');
}, 0);
});
}, 'during');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'unhandled rejection during promise_test');
promise_test(() => {
return makeTest(
() => {
setup({ explicit_done: true });
test(() => {}, 'before');
Promise.reject(new Error('this error is expected'));
window.addEventListener('unhandledrejection', () => {
test(() => {}, 'after');
done();
});
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
// TODO: investigate why this is not present
assert_false('after' in tests);
});
}, 'unhandled rejection between tests');
promise_test(() => {
return makeTest(
() => {
setup({ explicit_done: true });
async_test((t) => setTimeout(t.done.bind(t), 0), 'before');
Promise.reject(new Error('this error is expected'));
window.addEventListener('unhandledrejection', () => {
async_test((t) => setTimeout(t.done.bind(t), 0), 'after');
done();
});
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
assert_equals(tests.after, 'PASS');
});
}, 'unhandled rejection between async_tests');
promise_test(() => {
return makeTest(
() => {
setup({ explicit_done: true });
promise_test(() => Promise.resolve(), 'before');
Promise.reject(new Error('this error is expected'));
window.addEventListener('unhandledrejection', () => {
promise_test(() => Promise.resolve(), 'after');
done();
});
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.before, 'PASS');
// TODO: investigate why this is not present
assert_false('after' in tests);
});
}, 'unhandled rejection between promise_tests');
promise_test(() => {
return makeTest(
() => {
test((t) => {
t.add_cleanup(() => { throw new Error('this error is expected'); });
}, 'during');
test((t) => {}, 'after');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'NOTRUN');
});
}, 'exception in `add_cleanup` of a test');
}
promise_test(() => {
return makeTest(
() => {
setup({explicit_done: true});
window.addEventListener('DOMContentLoaded', () => {
async_test((t) => {
t.add_cleanup(() => {
setTimeout(() => {
async_test((t) => t.done(), 'after');
done();
}, 0);
throw new Error('this error is expected');
});
setTimeout(t.done.bind(t), 0);
}, 'during');
});
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.during, 'PASS');
assert_equals(tests.after, 'NOTRUN');
});
}, 'exception in `add_cleanup` of an async_test');
promise_test(() => {
return makeTest(
() => {
promise_test((t) => {
t.add_cleanup(() => { throw new Error('this error is expected'); });
return Promise.resolve();
}, 'test');
}
).then(({harness, tests}) => {
assert_equals(harness, 'ERROR');
assert_equals(tests.test, 'PASS');
});
}, 'exception in `add_cleanup` of a promise_test');
promise_test(() => {
return makeTest(
() => {
promise_test((t) => {
t.step(() => {
throw new Error('this error is expected');
});
}, 'test');
async_test((t) => t.done(), 'after');
}
).then(({harness, tests}) => {
assert_equals(harness, 'OK');
assert_equals(tests.test, 'FAIL');
assert_equals(tests.after, 'PASS');
});
}, 'exception in `step` of an async_test');
promise_test(() => {
return makeTest(
() => {
promise_test((t) => {
t.step(() => {
throw new Error('this error is expected');
});
return new Promise(() => {});
}, 'test');
// This following test should be run to completion despite the fact
// that the promise returned by the previous test never resolves.
promise_test((t) => Promise.resolve(), 'after');
}
).then(({harness, tests}) => {
assert_equals(harness, 'OK');
assert_equals(tests.test, 'FAIL');
assert_equals(tests.after, 'PASS');
});
}, 'exception in `step` of a promise_test');
</script>
</body>
</html>

View File

@ -536,7 +536,6 @@ policies and contribution forms [3].
/*
* API functions
*/
function test(func, name, properties)
{
var test_name = name ? name : test_environment.next_default_test_name();
@ -566,31 +565,42 @@ policies and contribution forms [3].
function promise_test(func, name, properties) {
var test = async_test(name, properties);
test._is_promise_test = true;
// If there is no promise tests queue make one.
if (!tests.promise_tests) {
tests.promise_tests = Promise.resolve();
}
tests.promise_tests = tests.promise_tests.then(function() {
var donePromise = new Promise(function(resolve) {
test._add_cleanup(resolve);
});
var promise = test.step(func, test, test);
test.step(function() {
assert_not_equals(promise, undefined);
});
Promise.resolve(promise).then(
function() {
return new Promise(function(resolve) {
var promise = test.step(func, test, test);
test.step(function() {
assert_not_equals(promise, undefined);
});
// Test authors may use the `step` method within a
// `promise_test` even though this reflects a mixture of
// asynchronous control flow paradigms. The "done" callback
// should be registered prior to the resolution of the
// user-provided Promise to avoid timeouts in cases where the
// Promise does not settle but a `step` function has thrown an
// error.
add_test_done_callback(test, resolve);
Promise.resolve(promise)
.catch(test.step_func(
function(value) {
if (value instanceof AssertionError) {
throw value;
}
assert(false, "promise_test", null,
"Unhandled rejection with value: ${value}", {value:value});
}))
.then(function() {
test.done();
})
.catch(test.step_func(
function(value) {
if (value instanceof AssertionError) {
throw value;
}
assert(false, "promise_test", null,
"Unhandled rejection with value: ${value}", {value:value});
}));
return donePromise;
});
});
});
}
@ -723,6 +733,8 @@ policies and contribution forms [3].
tests.set_file_is_test();
}
if (tests.file_is_test) {
// file-is-test files never have asynchronous cleanup logic,
// meaning the fully-synchronous `done` function can be used here.
tests.tests[0].done();
}
tests.end_wait();
@ -1451,7 +1463,7 @@ policies and contribution forms [3].
}
this.name = name;
this.phase = tests.phase === tests.phases.ABORTED ?
this.phase = tests.is_aborted ?
this.phases.COMPLETE : this.phases.INITIAL;
this.status = this.NOTRUN;
@ -1470,9 +1482,11 @@ policies and contribution forms [3].
this.stack = null;
this.steps = [];
this._is_promise_test = false;
this.cleanup_callbacks = [];
this._user_defined_cleanup_count = 0;
this._done_callbacks = [];
tests.push(this);
}
@ -1490,7 +1504,8 @@ policies and contribution forms [3].
INITIAL:0,
STARTED:1,
HAS_RESULT:2,
COMPLETE:3
CLEANING:3,
COMPLETE:4
};
Test.prototype.structured_clone = function()
@ -1646,9 +1661,13 @@ policies and contribution forms [3].
Test.prototype.force_timeout = Test.prototype.timeout;
/**
* Update the test status, initiate "cleanup" functions, and signal test
* completion.
*/
Test.prototype.done = function()
{
if (this.phase == this.phases.COMPLETE) {
if (this.phase >= this.phases.CLEANING) {
return;
}
@ -1656,15 +1675,23 @@ policies and contribution forms [3].
this.set_status(this.PASS, null);
}
this.phase = this.phases.COMPLETE;
if (global_scope.clearTimeout) {
clearTimeout(this.timeout_id);
}
tests.result(this);
this.cleanup();
};
function add_test_done_callback(test, callback)
{
if (test.phase === test.phases.COMPLETE) {
callback();
return;
}
test._done_callbacks.push(callback);
}
/*
* Invoke all specified cleanup functions. If one or more produce an error,
* the context is in an unpredictable state, so all further testing should
@ -1672,30 +1699,109 @@ policies and contribution forms [3].
*/
Test.prototype.cleanup = function() {
var error_count = 0;
var total;
var bad_value_count = 0;
function on_error() {
error_count += 1;
// Abort tests immediately so that tests declared within subsequent
// cleanup functions are not run.
tests.abort();
}
var this_obj = this;
var results = [];
this.phase = this.phases.CLEANING;
forEach(this.cleanup_callbacks,
function(cleanup_callback) {
var result;
try {
cleanup_callback();
result = cleanup_callback();
} catch (e) {
// Set test phase immediately so that tests declared
// within subsequent cleanup functions are not run.
tests.phase = tests.phases.ABORTED;
error_count += 1;
on_error();
return;
}
if (!is_valid_cleanup_result(this_obj, result)) {
bad_value_count += 1;
// Abort tests immediately so that tests declared
// within subsequent cleanup functions are not run.
tests.abort();
}
results.push(result);
});
if (error_count > 0) {
total = this._user_defined_cleanup_count;
tests.status.status = tests.status.ERROR;
tests.status.message = "Test named '" + this.name +
"' specified " + total + " 'cleanup' function" +
(total > 1 ? "s" : "") + ", and " + error_count + " failed.";
tests.status.stack = null;
if (!this._is_promise_test) {
cleanup_done(this_obj, error_count, bad_value_count);
} else {
all_async(results,
function(result, done) {
if (result && typeof result.then === "function") {
result
.then(null, on_error)
.then(done);
} else {
done();
}
},
function() {
cleanup_done(this_obj, error_count, bad_value_count);
});
}
};
/**
* Determine if the return value of a cleanup function is valid for a given
* test. Any test may return the value `undefined`. Tests created with
* `promise_test` may alternatively return "thenable" object values.
*/
function is_valid_cleanup_result(test, result) {
if (result === undefined) {
return true;
}
if (test._is_promise_test) {
return result && typeof result.then === "function";
}
return false;
}
function cleanup_done(test, error_count, bad_value_count) {
if (error_count || bad_value_count) {
var total = test._user_defined_cleanup_count;
tests.status.status = tests.status.ERROR;
tests.status.message = "Test named '" + test.name +
"' specified " + total +
" 'cleanup' function" + (total > 1 ? "s" : "");
if (error_count) {
tests.status.message += ", and " + error_count + " failed";
}
if (bad_value_count) {
var type = test._is_promise_test ?
"non-thenable" : "non-undefined";
tests.status.message += ", and " + bad_value_count +
" returned a " + type + " value";
}
tests.status.message += ".";
tests.status.stack = null;
}
test.phase = test.phases.COMPLETE;
tests.result(test);
forEach(test._done_callbacks,
function(callback) {
callback();
});
test._done_callbacks.length = 0;
}
/*
* A RemoteTest object mirrors a Test object on a remote worker. The
* associated RemoteWorker updates the RemoteTest object in response to
@ -1712,6 +1818,7 @@ policies and contribution forms [3].
this.index = null;
this.phase = this.phases.INITIAL;
this.update_state_from(clone);
this._done_callbacks = [];
tests.push(this);
}
@ -1720,6 +1827,15 @@ policies and contribution forms [3].
Object.keys(this).forEach(
(function(key) {
var value = this[key];
// `RemoteTest` instances are responsible for managing
// their own "done" callback functions, so those functions
// are not relevant in other execution contexts. Because of
// this (and because Function values cannot be serialized
// for cross-realm transmittance), the property should not
// be considered when cloning instances.
if (key === '_done_callbacks' ) {
return;
}
if (typeof value === "object" && value !== null) {
clone[key] = merge({}, value);
@ -1731,7 +1847,19 @@ policies and contribution forms [3].
return clone;
};
RemoteTest.prototype.cleanup = function() {};
/**
* `RemoteTest` instances are objects which represent tests running in
* another realm. They do not define "cleanup" functions (if necessary,
* such functions are defined on the associated `Test` instance within the
* external realm). However, `RemoteTests` may have "done" callbacks (e.g.
* as attached by the `Tests` instance responsible for tracking the overall
* test status in the parent realm). The `cleanup` method delegates to
* `done` in order to ensure that such callbacks are invoked following the
* completion of the `RemoteTest`.
*/
RemoteTest.prototype.cleanup = function() {
this.done();
};
RemoteTest.prototype.phases = Test.prototype.phases;
RemoteTest.prototype.update_state_from = function(clone) {
this.status = clone.status;
@ -1743,6 +1871,11 @@ policies and contribution forms [3].
};
RemoteTest.prototype.done = function() {
this.phase = this.phases.COMPLETE;
forEach(this._done_callbacks,
function(callback) {
callback();
});
}
/*
@ -1774,6 +1907,11 @@ policies and contribution forms [3].
this.message_target = message_target;
this.message_handler = function(message) {
var passesFilter = !message_filter || message_filter(message);
// The reference to the `running` property in the following
// condition is unnecessary because that value is only set to
// `false` after the `message_handler` function has been
// unsubscribed.
// TODO: Simplify the condition by removing the reference.
if (this_obj.running && message.data && passesFilter &&
(message.data.type in this_obj.message_handlers)) {
this_obj.message_handlers[message.data.type].call(this_obj, message.data);
@ -1794,13 +1932,9 @@ policies and contribution forms [3].
var filename = (error.filename ? " " + error.filename: "");
// FIXME: Display remote error states separately from main document
// error state.
this.remote_done({
status: {
status: tests.status.ERROR,
message: "Error in remote" + filename + ": " + message,
stack: error.stack
}
});
tests.set_status(tests.status.ERROR,
"Error in remote" + filename + ": " + message,
error.stack);
if (error.preventDefault) {
error.preventDefault();
@ -1827,10 +1961,9 @@ policies and contribution forms [3].
RemoteContext.prototype.remote_done = function(data) {
if (tests.status.status === null &&
data.status.status !== data.status.OK) {
tests.status.status = data.status.status;
tests.status.message = data.status.message;
tests.status.stack = data.status.stack;
tests.set_status(data.status.status, data.status.message, data.status.sack);
}
this.message_target.removeEventListener("message", this.message_handler);
this.running = false;
@ -1902,8 +2035,7 @@ policies and contribution forms [3].
SETUP:1,
HAVE_TESTS:2,
HAVE_RESULTS:3,
COMPLETE:4,
ABORTED:5
COMPLETE:4
};
this.phase = this.phases.INITIAL;
@ -1993,6 +2125,13 @@ policies and contribution forms [3].
async_test();
};
Tests.prototype.set_status = function(status, message, stack)
{
this.status.status = status;
this.status.message = message;
this.status.stack = stack ? stack : null;
};
Tests.prototype.set_timeout = function() {
if (global_scope.clearTimeout) {
var this_obj = this;
@ -2006,9 +2145,35 @@ policies and contribution forms [3].
};
Tests.prototype.timeout = function() {
var test_in_cleanup = null;
if (this.status.status === null) {
this.status.status = this.status.TIMEOUT;
forEach(this.tests,
function(test) {
// No more than one test is expected to be in the
// "CLEANING" phase at any time
if (test.phase === test.phases.CLEANING) {
test_in_cleanup = test;
}
test.phase = test.phases.COMPLETE;
});
// Timeouts that occur while a test is in the "cleanup" phase
// indicate that some global state was not properly reverted. This
// invalidates the overall test execution, so the timeout should be
// reported as an error and cancel the execution of any remaining
// tests.
if (test_in_cleanup) {
this.status.status = this.status.ERROR;
this.status.message = "Timeout while running cleanup for " +
"test named \"" + test_in_cleanup.name + "\".";
tests.status.stack = null;
} else {
this.status.status = this.status.TIMEOUT;
}
}
this.complete();
};
@ -2039,11 +2204,10 @@ policies and contribution forms [3].
};
Tests.prototype.all_done = function() {
return this.phase === this.phases.ABORTED ||
(this.tests.length > 0 && test_environment.all_loaded &&
this.num_pending === 0 && !this.wait_for_finish &&
return this.tests.length > 0 && test_environment.all_loaded &&
(this.num_pending === 0 || this.is_aborted) && !this.wait_for_finish &&
!this.processing_callbacks &&
!this.pending_remotes.some(function(w) { return w.running; }));
!this.pending_remotes.some(function(w) { return w.running; });
};
Tests.prototype.start = function() {
@ -2062,10 +2226,11 @@ policies and contribution forms [3].
// NOTE(review): diff artifact — the removed guard (`if (phase >
// HAVE_RESULTS) { return;`) and its added replacement are interleaved
// below, leaving an unclosed brace, so this span is not valid JavaScript
// as written. The post-patch intent is: clamp the phase forward to
// HAVE_RESULTS without ever reverting it, then record the result.
Tests.prototype.result = function(test)
{
    if (this.phase > this.phases.HAVE_RESULTS) {
        return;
    // If the harness has already transitioned beyond the `HAVE_RESULTS`
    // phase, subsequent tests should not cause it to revert.
    if (this.phase <= this.phases.HAVE_RESULTS) {
        this.phase = this.phases.HAVE_RESULTS;
    }
    this.phase = this.phases.HAVE_RESULTS;
    this.num_pending--;
    this.notify_result(test);
};
@ -2088,19 +2253,54 @@ policies and contribution forms [3].
// NOTE(review): diff artifact — the lines below interleave the removed
// (synchronous) implementation of the harness `complete` routine with its
// added (cleanup-aware) replacement; the enclosing function header lies
// outside this excerpt. Both bodies are preserved here verbatim; resolve
// to the post-patch version (the `all_complete`/`incomplete` form).
if (this.phase === this.phases.COMPLETE) {
    // Completion is idempotent: a second call is a no-op.
    return;
}
this.phase = this.phases.COMPLETE;
var this_obj = this;
// Pre-patch body (removed by the diff): synchronously report, clean up,
// and mark complete every unfinished test, then notify completion.
this.tests.forEach(
    function(x)
    {
        if (x.phase < x.phases.COMPLETE) {
            this_obj.notify_result(x);
            x.cleanup();
            x.phase = x.phases.COMPLETE;
        }
    }
);
this.notify_complete();
// Post-patch body (added by the diff): run each incomplete test's
// cleanup — possibly asynchronous — before signaling overall completion.
var all_complete = function() {
    this_obj.phase = this_obj.phases.COMPLETE;
    this_obj.notify_complete();
};
var incomplete = filter(this.tests,
                        function(test) {
                            return test.phase < test.phases.COMPLETE;
                        });
/**
 * To preserve legacy behavior, overall test completion must be
 * signaled synchronously.
 */
if (incomplete.length === 0) {
    all_complete();
    return;
}
all_async(incomplete,
          function(test, testDone)
          {
              // Tests that never started need no cleanup; otherwise wait
              // for the test's done callback before counting it finished.
              if (test.phase === test.phases.INITIAL) {
                  test.phase = test.phases.COMPLETE;
                  testDone();
              } else {
                  add_test_done_callback(test, testDone);
                  test.cleanup();
              }
          },
          all_complete);
};
/**
 * Abort the harness after an unrecoverable error: record an ERROR
 * harness status, flag the run as aborted, and mark every test that has
 * not yet started (still in its INITIAL phase) as COMPLETE so that it
 * will never be executed.
 */
Tests.prototype.abort = function() {
    this.is_aborted = true;
    this.status.status = this.status.ERROR;
    forEach(this.tests, function(pending_test) {
        // Tests that already started are left to finish on their own.
        if (pending_test.phase !== pending_test.phases.INITIAL) {
            return;
        }
        pending_test.phase = pending_test.phases.COMPLETE;
    });
};
/*
@ -2866,6 +3066,57 @@ policies and contribution forms [3].
}
}
/**
 * Invoke `iter_callback` once per entry of `values` — every invocation is
 * started immediately, in order — and invoke `done_callback` exactly once
 * after all of them have signaled completion.
 *
 * If every completion signal fires synchronously (or `values` is empty),
 * `done_callback` runs synchronously as well; callers that require
 * asynchronicity must arrange for it themselves.
 *
 * @param {array} values Zero or more values to use in the invocation of
 *                       `iter_callback`
 * @param {function} iter_callback A function that will be invoked once for
 *                                 each of the provided `values`. Two
 *                                 arguments will be available in each
 *                                 invocation: the value from `values` and
 *                                 a function that must be invoked to
 *                                 signal completion (duplicate calls for
 *                                 the same value are ignored)
 * @param {function} done_callback A function that will be invoked after
 *                                 all operations initiated by the
 *                                 `iter_callback` function have signaled
 *                                 completion
 */
function all_async(values, iter_callback, done_callback)
{
    var pending = values.length;

    if (pending === 0) {
        done_callback();
    }

    forEach(values, function(value) {
        var signaled = false;

        iter_callback(value, function() {
            // Guard against an iteratee signaling completion twice for
            // the same value, which would corrupt the pending count.
            if (signaled) {
                return;
            }
            signaled = true;

            pending -= 1;
            if (pending === 0) {
                done_callback();
            }
        });
    });
}
function merge(a,b)
{
var rv = {};
@ -3008,6 +3259,8 @@ policies and contribution forms [3].
}
test.set_status(test.FAIL, e.message, stack);
test.phase = test.phases.HAS_RESULT;
// The following function invocation is superfluous.
// TODO: Remove.
test.done();
} else if (!tests.allow_uncaught_exception) {
tests.status.status = tests.status.ERROR;