#!/usr/bin/env python
#
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
#

from __future__ import with_statement
import sys, os, unittest, tempfile, shutil
from StringIO import StringIO
from xml.etree.ElementTree import ElementTree

from mozbuild.base import MozbuildObject
build_obj = MozbuildObject.from_environment()

from runxpcshelltests import XPCShellTests

objdir = build_obj.topobjdir.encode("utf-8")
xpcshellBin = os.path.join(objdir, "dist", "bin", "xpcshell")
if sys.platform == "win32":
    xpcshellBin += ".exe"
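
# Note: these selftests drive the real xpcshell binary from the objdir's
# dist/bin (located above), so they assume a fully built tree.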

SIMPLE_PASSING_TEST = "function run_test() { do_check_true(true); }"
SIMPLE_FAILING_TEST = "function run_test() { do_check_true(false); }"

ADD_TEST_SIMPLE = '''
function run_test() { run_next_test(); }

add_test(function test_simple() {
  do_check_true(true);
  run_next_test();
});
'''

ADD_TEST_FAILING = '''
function run_test() { run_next_test(); }

add_test(function test_failing() {
  do_check_true(false);
  run_next_test();
});
'''
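
# The CHILD_TEST_* snippets below exercise the child-process code path:
# run_test_in_child() runs the named test file in a separate (child) xpcshell
# process, and the parent harness folds the child's output into the log.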

CHILD_TEST_PASSING = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  run_test_in_child("test_pass.js");
  run_next_test();
});
'''

CHILD_TEST_FAILING = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  run_test_in_child("test_fail.js");
  run_next_test();
});
'''
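
# In CHILD_TEST_HANG, the stray unary "+" in the sendCommand() expression
# appears to be deliberate: "..." + + "..." evaluates to the first string plus
# NaN, so the child logs CHILD-TEST-STARTED but never receives _execute_test(),
# which is the incomplete-output condition testChildHang expects.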

CHILD_TEST_HANG = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  do_test_pending("hang test");
  do_load_child_test_harness();
  sendCommand("_log('child_test_start', {_message: 'CHILD-TEST-STARTED'}); " +
              + "const _TEST_FILE=['test_pass.js']; _execute_test(); ",
              do_test_finished);
  run_next_test();
});
'''
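
# The ADD_TASK_* snippets exercise add_task(), which drives a generator
# function and waits on each yielded promise (here the Add-on SDK promise
# module imported at the top of each snippet). A rejected promise or a failed
# assertion inside the task is expected to fail the whole test.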

ADD_TASK_SINGLE = '''
Components.utils.import("resource://gre/modules/commonjs/sdk/core/promise.js");

function run_test() { run_next_test(); }

add_task(function test_task() {
  yield Promise.resolve(true);
  yield Promise.resolve(false);
});
'''

ADD_TASK_MULTIPLE = '''
Components.utils.import("resource://gre/modules/commonjs/sdk/core/promise.js");

function run_test() { run_next_test(); }

add_task(function test_task() {
  yield Promise.resolve(true);
});

add_task(function test_2() {
  yield Promise.resolve(true);
});
'''

ADD_TASK_REJECTED = '''
Components.utils.import("resource://gre/modules/commonjs/sdk/core/promise.js");

function run_test() { run_next_test(); }

add_task(function test_failing() {
  yield Promise.reject(new Error("I fail."));
});
'''

ADD_TASK_FAILURE_INSIDE = '''
Components.utils.import("resource://gre/modules/commonjs/sdk/core/promise.js");

function run_test() { run_next_test(); }

add_task(function test() {
  let result = yield Promise.resolve(false);

  do_check_true(result);
});
'''

ADD_TEST_THROW_STRING = '''
function run_test() {do_throw("Passing a string to do_throw")};
'''
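
# These error objects deliberately mimic the shape of an Error (message,
# fileName, stack, toString) without being 'instanceof Error', so the
# harness's failure formatting is exercised on a foreign object.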

ADD_TEST_THROW_OBJECT = '''
let error = {
  message: "Error object",
  fileName: "failure.js",
  stack: "ERROR STACK",
  toString: function() {return this.message;}
};
function run_test() {do_throw(error)};
'''

ADD_TEST_REPORT_OBJECT = '''
let error = {
  message: "Error object",
  fileName: "failure.js",
  stack: "ERROR STACK",
  toString: function() {return this.message;}
};
function run_test() {do_report_unexpected_exception(error)};
'''

# A test for genuine JS-generated Error objects
ADD_TEST_REPORT_REF_ERROR = '''
function run_test() {
  let obj = {blah: 0};
  try {
    obj.noSuchFunction();
  }
  catch (error) {
    do_report_unexpected_exception(error);
  }
};
'''

class XPCShellTestsTests(unittest.TestCase):
    """
    Yes, these are unit tests for a unit test harness.
    """
    def setUp(self):
        self.log = StringIO()
        self.tempdir = tempfile.mkdtemp()
        self.x = XPCShellTests(log=self.log)

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def writeFile(self, name, contents):
        """
        Write |contents| to a file named |name| in the temp directory,
        and return the full path to the file.
        """
        fullpath = os.path.join(self.tempdir, name)
        with open(fullpath, "w") as f:
            f.write(contents)
        return fullpath

    def writeManifest(self, tests):
        """
        Write an xpcshell.ini in the temp directory and set
        self.manifest to its pathname. |tests| is a list containing
        either strings (for test names), or tuples with a test name
        as the first element and manifest conditions as the following
        elements.
        """
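        # For example, writeManifest(["test_a.js", ("test_b.js", "skip-if = true")])
        # (hypothetical test names) should produce an xpcshell.ini along the
        # lines of:
        #
        #   [DEFAULT]
        #   head =
        #   tail =
        #
        #   [test_a.js]
        #   [test_b.js]
        #   skip-if = true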
        testlines = []
        for t in tests:
            testlines.append("[%s]" % (t if isinstance(t, basestring)
                                       else t[0]))
            if isinstance(t, tuple):
                testlines.extend(t[1:])
        self.manifest = self.writeFile("xpcshell.ini", """
[DEFAULT]
head =
tail =

""" + "\n".join(testlines))

    def assertTestResult(self, expected, shuffle=False, xunitFilename=None, verbose=False):
        """
        Assert that self.x.runTests with manifest=self.manifest
        returns |expected|.
        """
        self.assertEquals(expected,
                          self.x.runTests(xpcshellBin,
                                          manifest=self.manifest,
                                          mozInfo={},
                                          shuffle=shuffle,
                                          testsRootDir=self.tempdir,
                                          verbose=verbose,
                                          xunitFilename=xunitFilename,
                                          sequential=True),
                          msg="""Tests should have %s, log:
========
%s
========
""" % ("passed" if expected else "failed", self.log.getvalue()))

    def _assertLog(self, s, expected):
        l = self.log.getvalue()
        self.assertEqual(expected, s in l,
                         msg="""Value %s %s in log:
========
%s
========""" % (s, "expected" if expected else "not expected", l))

    def assertInLog(self, s):
        """
        Assert that the string |s| is contained in self.log.
        """
        self._assertLog(s, True)

    def assertNotInLog(self, s):
        """
        Assert that the string |s| is not contained in self.log.
        """
        self._assertLog(s, False)

    def testPass(self):
        """
        Check that a simple test without any manifest conditions passes.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest(["test_basic.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")

    def testFail(self):
        """
        Check that a simple failing test without any manifest conditions fails.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest(["test_basic.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildPass(self):
        """
        Check that a simple test running in a child process passes.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_child_pass.js", CHILD_TEST_PASSING)
        self.writeManifest(["test_child_pass.js"])

        self.assertTestResult(True, verbose=True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildFail(self):
        """
        Check that a simple failing test running in a child process fails.
        """
        self.writeFile("test_fail.js", SIMPLE_FAILING_TEST)
        self.writeFile("test_child_fail.js", CHILD_TEST_FAILING)
        self.writeManifest(["test_child_fail.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-PASS")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildHang(self):
        """
        Check that incomplete output from a child process results in a
        test failure.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_child_hang.js", CHILD_TEST_HANG)
        self.writeManifest(["test_child_hang.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertNotInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-PASS")

    def testSyntaxError(self):
        """
        Check that running a test file containing a syntax error produces
        a test failure and expected output.
        """
        self.writeFile("test_syntax_error.js", '"')
        self.writeManifest(["test_syntax_error.js"])

        self.assertTestResult(False, verbose=True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testPassFail(self):
        """
        Check that running more than one test works.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_fail.js", SIMPLE_FAILING_TEST)
        self.writeManifest(["test_pass.js", "test_fail.js"])

        self.assertTestResult(False)
        self.assertEquals(2, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertInLog("TEST-UNEXPECTED-FAIL")

    def testSkip(self):
        """
        Check that a simple failing test skipped in the manifest does
        not cause failure.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest([("test_basic.js", "skip-if = true")])
        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testKnownFail(self):
        """
        Check that a simple failing test marked as known-fail in the manifest
        does not cause failure.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest([("test_basic.js", "fail-if = true")])
        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(1, self.x.todoCount)
        self.assertInLog("TEST-KNOWN-FAIL")
        # This should be suppressed because the harness doesn't include
        # the full log from the xpcshell run when things pass.
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testUnexpectedPass(self):
        """
        Check that a simple passing test marked as known-fail in the manifest
        is reported as an unexpected pass.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "fail-if = true")])
        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        # From the outer (Python) harness
        self.assertInLog("TEST-UNEXPECTED-PASS")
        self.assertNotInLog("TEST-KNOWN-FAIL")
        # From the inner (JS) harness
        self.assertInLog("TEST-PASS")

    def testReturnNonzero(self):
        """
        Check that a test where xpcshell returns nonzero fails.
        """
        self.writeFile("test_error.js", "throw 'foo'")
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testAddTestSimple(self):
        """
        Ensure simple add_test() works.
        """
        self.writeFile("test_add_test_simple.js", ADD_TEST_SIMPLE)
        self.writeManifest(["test_add_test_simple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTestFailing(self):
        """
        Ensure add_test() with a failing test is reported.
        """
        self.writeFile("test_add_test_failing.js", ADD_TEST_FAILING)
        self.writeManifest(["test_add_test_failing.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testAddTaskTestSingle(self):
        """
        Ensure add_task() with a single passing task works.
        """
        self.writeFile("test_add_task_simple.js", ADD_TASK_SINGLE)
        self.writeManifest(["test_add_task_simple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTaskTestMultiple(self):
        """
        Ensure multiple calls to add_task() work as expected.
        """
        self.writeFile("test_add_task_multiple.js",
                       ADD_TASK_MULTIPLE)
        self.writeManifest(["test_add_task_multiple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTaskTestRejected(self):
        """
        Ensure a rejected task is reported as a failure.
        """
        self.writeFile("test_add_task_rejected.js",
                       ADD_TASK_REJECTED)
        self.writeManifest(["test_add_task_rejected.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testAddTaskTestFailureInside(self):
        """
        Ensure a failed assertion inside a task is reported as a failure.
        """
        self.writeFile("test_add_task_failure_inside.js",
                       ADD_TASK_FAILURE_INSIDE)
        self.writeManifest(["test_add_task_failure_inside.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testMissingHeadFile(self):
        """
        Ensure that a missing head file results in a fatal error.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "head = missing.js")])

        raised = False

        try:
            # The actual return value is never checked because we raise.
            self.assertTestResult(True)
        except Exception, ex:
            raised = True
            self.assertEquals(ex.message[0:9], "head file")

        self.assertTrue(raised)

    def testMissingTailFile(self):
        """
        Ensure that a missing tail file results in a fatal error.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "tail = missing.js")])

        raised = False

        try:
            self.assertTestResult(True)
        except Exception, ex:
            raised = True
            self.assertEquals(ex.message[0:9], "tail file")

        self.assertTrue(raised)

    def testRandomExecution(self):
        """
        Check that random execution doesn't break.
        """
        manifest = []
        for i in range(0, 10):
            filename = "test_pass_%d.js" % i
            self.writeFile(filename, SIMPLE_PASSING_TEST)
            manifest.append(filename)

        self.writeManifest(manifest)
        self.assertTestResult(True, shuffle=True)
        self.assertEquals(10, self.x.testCount)
        self.assertEquals(10, self.x.passCount)

    def testXunitOutput(self):
        """
        Check that Xunit XML files are written.
        """
        self.writeFile("test_00.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_01.js", SIMPLE_FAILING_TEST)
        self.writeFile("test_02.js", SIMPLE_PASSING_TEST)

        manifest = [
            "test_00.js",
            "test_01.js",
            ("test_02.js", "skip-if = true")
        ]

        self.writeManifest(manifest)

        filename = os.path.join(self.tempdir, "xunit.xml")

        self.assertTestResult(False, xunitFilename=filename)

        self.assertTrue(os.path.exists(filename))
        self.assertTrue(os.path.getsize(filename) > 0)

        tree = ElementTree()
        tree.parse(filename)
        suite = tree.getroot()

        self.assertTrue(suite is not None)
        self.assertEqual(suite.get("tests"), "3")
        self.assertEqual(suite.get("failures"), "1")
        self.assertEqual(suite.get("skip"), "1")

        testcases = suite.findall("testcase")
        self.assertEqual(len(testcases), 3)

        for testcase in testcases:
            attributes = testcase.keys()
            self.assertTrue("classname" in attributes)
            self.assertTrue("name" in attributes)
            self.assertTrue("time" in attributes)

        self.assertTrue(testcases[1].find("failure") is not None)
        self.assertTrue(testcases[2].find("skipped") is not None)

    def testDoThrowString(self):
        """
        Check that do_throw produces reasonable messages when the
        input is a string instead of an object.
        """
        self.writeFile("test_error.js", ADD_TEST_THROW_STRING)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("Passing a string to do_throw")
        self.assertNotInLog("TEST-PASS")

    def testDoThrowForeignObject(self):
        """
        Check that do_throw produces reasonable messages when the
        input is a generic object with 'fileName', 'message' and 'stack'
        attributes, but 'object instanceof Error' returns false.
        """
        self.writeFile("test_error.js", ADD_TEST_THROW_OBJECT)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("failure.js")
        self.assertInLog("Error object")
        self.assertInLog("ERROR STACK")
        self.assertNotInLog("TEST-PASS")

    def testDoReportForeignObject(self):
        """
        Check that do_report_unexpected_exception produces reasonable messages
        when the input is a generic object with 'fileName', 'message' and
        'stack' attributes, but 'object instanceof Error' returns false.
        """
        self.writeFile("test_error.js", ADD_TEST_REPORT_OBJECT)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("failure.js")
        self.assertInLog("Error object")
        self.assertInLog("ERROR STACK")
        self.assertNotInLog("TEST-PASS")

    def testDoReportRefError(self):
        """
        Check that do_report_unexpected_exception produces reasonable messages
        when the input is a JS-generated Error.
        """
        self.writeFile("test_error.js", ADD_TEST_REPORT_REF_ERROR)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("test_error.js")
        self.assertInLog("obj.noSuchFunction is not a function")
        self.assertInLog("run_test@")
        self.assertNotInLog("TEST-PASS")

if __name__ == "__main__":
    unittest.main()