Bug 571062 - Add slow-test annotation functionality to reftest and the JS test harness. r=dbaron, r=dmandelin

This commit is contained in:
Jeff Walden 2010-07-13 18:04:29 -05:00
parent 2c32aaa513
commit 629a60bd96
8 changed files with 69 additions and 17 deletions

View File

@ -149,11 +149,11 @@ script 15.9.5.7.js
script 15.9.5.8.js
script 15.9.5.9.js
script 15.9.5.js
script dst-offset-caching-1-of-8.js
script dst-offset-caching-2-of-8.js
script dst-offset-caching-3-of-8.js
script dst-offset-caching-4-of-8.js
script dst-offset-caching-5-of-8.js
script dst-offset-caching-6-of-8.js
script dst-offset-caching-7-of-8.js
script dst-offset-caching-8-of-8.js
slow script dst-offset-caching-1-of-8.js
slow script dst-offset-caching-2-of-8.js
slow script dst-offset-caching-3-of-8.js
slow script dst-offset-caching-4-of-8.js
slow script dst-offset-caching-5-of-8.js
slow script dst-offset-caching-6-of-8.js
slow script dst-offset-caching-7-of-8.js
slow script dst-offset-caching-8-of-8.js

View File

@ -227,6 +227,8 @@ if __name__ == '__main__':
help='check for test files not listed in the manifest')
op.add_option('--failure-file', dest='failure_file',
help='write tests that have not passed to the given file')
op.add_option('--run-slow-tests', dest='run_slow_tests', action='store_true',
help='run particularly slow tests as well as average-speed tests')
(OPTIONS, args) = op.parse_args()
if len(args) < 1:
if not OPTIONS.check_manifest:
@ -310,6 +312,9 @@ if __name__ == '__main__':
OPTIONS.run_skipped = True
test_list = [ _ for _ in test_list if not _.enable ]
if not OPTIONS.run_slow_tests:
test_list = [ _ for _ in test_list if not _.slow ]
if OPTIONS.debug and test_list:
if len(test_list) > 1:
print('Multiple tests match command line arguments, debugger can only run one')

View File

@ -110,6 +110,7 @@ def parse(filename, xul_tester, reldir = ''):
enable = True
expect = True
random = False
slow = False
pos = 0
while pos < len(parts):
@ -144,11 +145,14 @@ def parse(filename, xul_tester, reldir = ''):
elif parts[pos] == 'script':
script = parts[pos+1]
pos += 2
elif parts[pos] == 'slow':
slow = True
pos += 1
else:
print 'warning: invalid manifest line element "%s"'%parts[pos]
pos += 1
assert script is not None
ans.append(TestCase(os.path.join(reldir, script),
enable, expect, random))
enable, expect, random, slow))
return ans

View File

@ -87,11 +87,12 @@ class Test(object):
class TestCase(Test):
"""A test case consisting of a test and an expected result."""
def __init__(self, path, enable, expect, random):
def __init__(self, path, enable, expect, random, slow):
Test.__init__(self, path)
self.enable = enable # bool: True => run test, False => don't run
self.expect = expect # bool: expected result, True => pass
self.random = random # bool: True => ignore output as 'random'
self.slow = slow # bool: True => test may run slowly
def __str__(self):
ans = self.path
@ -101,6 +102,8 @@ class TestCase(Test):
ans += ', fails'
if self.random:
ans += ', random'
if self.slow:
ans += ', slow'
return ans
class TestOutput:

View File

@ -74,6 +74,17 @@ must be one of the following:
particular platform (i.e. it allows us to get test
coverage on the other platforms).
slow The test may take a long time to run, so run it only when slow tests
have been enabled, or at least not disabled (test manifest interpreters
may choose whether or not to run such tests by default).
slow-if(condition) If the condition is met, the test is treated as if
'slow' had been specified. This is useful for tests
which are slow only on particular platforms (e.g. a
test which exercises out-of-memory behavior might be
fast on a 32-bit system but inordinately slow on a
64-bit system).
asserts(count)
Loading the test and reference is known to assert exactly
count times.

View File

@ -79,6 +79,13 @@ RefTestCmdLineHandler.prototype =
catch (e) {
}
try {
var skipslowtests = cmdLine.handleFlag("reftestskipslowtests", false);
args.skipslowtests = skipslowtests;
}
catch (e) {
}
/* Ignore the platform's online/offline status while running reftests. */
var ios = Components.classes["@mozilla.org/network/io-service;1"]
.getService(Components.interfaces.nsIIOService2);

View File

@ -90,6 +90,7 @@ var gTestResults = {
AssertionKnown: 0,
Random : 0,
Skip: 0,
Slow: 0,
};
var gTotalTests = 0;
var gState;
@ -127,6 +128,9 @@ const gProtocolRE = /^\w+:/;
var HTTP_SERVER_PORT = 4444;
const HTTP_SERVER_PORTS_TO_TRY = 50;
// whether to run slow tests or not
var gRunSlowTests = true;
// whether we should skip caching canvases
var gNoCanvasCache = false;
@ -259,6 +263,9 @@ function StartTests()
if ("nocache" in args && args["nocache"])
gNoCanvasCache = true;
if ("skipslowtests" in args && args.skipslowtests)
gRunSlowTests = false;
ReadTopManifest(args.uri);
BuildUseCounts();
@ -660,10 +667,19 @@ function ServeFiles(manifestURL, depth, aURL, files)
function StartCurrentTest()
{
// make sure we don't run tests that are expected to kill the browser
while (gURLs.length > 0 && gURLs[0].expected == EXPECTED_DEATH) {
++gTestResults.Skip;
gDumpLog("REFTEST TEST-KNOWN-FAIL | " + gURLs[0].url1.spec + " | (SKIP)\n");
gURLs.shift();
while (gURLs.length > 0) {
var test = gURLs[0];
if (test.expected == EXPECTED_DEATH) {
++gTestResults.Skip;
gDumpLog("REFTEST TEST-KNOWN-FAIL | " + test.url1.spec + " | (SKIP)\n");
gURLs.shift();
} else if (test.slow && !gRunSlowTests) {
++gTestResults.Slow;
gDumpLog("REFTEST TEST-KNOWN-SLOW | " + test.url1.spec + " | (SLOW)\n");
gURLs.shift();
} else {
break;
}
}
if (gURLs.length == 0) {
@ -726,12 +742,13 @@ function DoneTests()
gTestResults.FailedLoad + " failed load, " +
gTestResults.Exception + " exception)\n");
count = gTestResults.KnownFail + gTestResults.AssertionKnown +
gTestResults.Random + gTestResults.Skip;
gDumpLog("REFTEST INFO | Known problems: " + count + " (" +
gTestResults.Random + gTestResults.Skip + gTestResults.Slow;
dump("REFTEST INFO | Known problems: " + count + " (" +
gTestResults.KnownFail + " known fail, " +
gTestResults.AssertionKnown + " known asserts, " +
gTestResults.Random + " random, " +
gTestResults.Skip + " skipped)\n");
gTestResults.Skip + " skipped, " +
gTestResults.Slow + " slow)\n");
gDumpLog("REFTEST INFO | Total canvas count = " + gRecycledCanvases.length + "\n");

View File

@ -229,6 +229,11 @@ class ReftestOptions(OptionParser):
help = "file to log output to in addition to stdout")
defaults["logFile"] = None
self.add_option("--skip-slow-tests",
dest = "skipSlowTests", action = "store_true",
help = "skip tests marked as slow when running")
defaults["skipSlowTests"] = False
self.set_defaults(**defaults)
def main():