Mirror of https://github.com/mozilla/gecko-dev.git, synced 2025-02-18 06:45:33 +00:00
Bug 778133 - Update mozbase in m-c for bug 776931. r=jhammel
This commit is contained in:
parent a74b36c7dc
commit ca785a485f
@@ -1,4 +1,22 @@
-This is the git repo for the Mozilla mozbase suite of python utilities.
+This is the git repository for the Mozilla mozbase suite of python utilities.
 
+MozBase is composed of several python packages. These packages work
+together to form the basis of a test harness.
+
+* Firefox is launched via [mozrunner](https://github.com/mozilla/mozbase/tree/master/mozrunner)
+** which sets up a profile with preferences and extensions using
+   [mozprofile](https://github.com/mozilla/mozbase/tree/master/mozprofile)
+** and runs the application under test using [mozprocess](https://github.com/mozilla/mozbase/tree/master/mozprocess)
+* [mozInstall](https://github.com/mozilla/mozbase/tree/master/mozinstall) is used to install the test application
+* A test harness may direct Firefox to load web pages. These may be
+  served using [mozhttpd](https://github.com/mozilla/mozbase/tree/master/mozhttpd) for testing
+* The machine environment is introspected by [mozinfo](https://github.com/mozilla/mozbase/tree/master/mozinfo)
+* A test manifest may be read to determine the tests to be run. These
+  manifests are processed by [ManifestDestiny](https://github.com/mozilla/mozbase/tree/master/manifestdestiny)
+* For mobile testing, the test runner communicates to the test agent
+  using [mozdevice](https://github.com/mozilla/mozbase/tree/master/mozdevice)
+  (Note that the canonical location of mozdevice is
+  mozilla-central: http://mxr.mozilla.org/mozilla-central/source/build/mobile/)
 
 Learn more about mozbase here:
 https://wiki.mozilla.org/Auto-tools/Projects/MozBase
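For a concrete sense of how these packages compose, here is a minimal, hypothetical harness fragment; the constructor keywords, preference, and binary path are assumptions rather than anything taken from this patch.

```python
# Hypothetical sketch of wiring mozprofile and mozrunner together.
from mozprofile import FirefoxProfile
from mozrunner import FirefoxRunner

# Build a profile with a preference set (value is illustrative).
profile = FirefoxProfile(preferences={'browser.startup.homepage': 'http://localhost:8888/'})

# Launch the application under test; mozrunner drives mozprocess internally.
runner = FirefoxRunner(binary='/path/to/firefox', profile=profile)
runner.start()   # spawn Firefox
runner.wait()    # block until the application exits
```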
@@ -562,6 +562,7 @@ falling back to not using job objects for managing child processes"""
         self.didTimeout = False
         self._ignore_children = ignore_children
         self.keywordargs = kwargs
+        self.outThread = None

         if env is None:
             env = os.environ.copy()
@@ -647,7 +648,7 @@ falling back to not using job objects for managing child processes"""
             for handler in self.onFinishHandlers:
                 handler()

-    def waitForFinish(self, timeout=None, outputTimeout=None):
+    def processOutput(self, timeout=None, outputTimeout=None):
         """
         Handle process output until the process terminates or times out.

@@ -658,34 +659,58 @@ falling back to not using job objects for managing child processes"""
         for that number of seconds without producing any output before
         being killed.
         """
-        if not hasattr(self, 'proc'):
-            self.run()
-
-        self.didTimeout = False
-        logsource = self.proc.stdout
-
-        lineReadTimeout = None
-        if timeout:
-            lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
-        elif outputTimeout:
-            lineReadTimeout = outputTimeout
-
-        (line, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
-        while line != "" and not self.didTimeout:
-            self.processOutputLine(line.rstrip())
-            if timeout:
-                lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
-            (line, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
-
-        if self.didTimeout:
-            self.proc.kill()
-            self.onTimeout()
-        else:
-            self.onFinish()
-
-        status = self.proc.wait()
-        return status
+        def _processOutput():
+            self.didTimeout = False
+            logsource = self.proc.stdout
+
+            lineReadTimeout = None
+            if timeout:
+                lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
+            elif outputTimeout:
+                lineReadTimeout = outputTimeout
+
+            (line, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
+            while line != "" and not self.didTimeout:
+                self.processOutputLine(line.rstrip())
+                if timeout:
+                    lineReadTimeout = timeout - (datetime.now() - self.startTime).seconds
+                (line, self.didTimeout) = self.readWithTimeout(logsource, lineReadTimeout)
+
+            if self.didTimeout:
+                self.proc.kill()
+                self.onTimeout()
+            else:
+                self.onFinish()
+
+        if not hasattr(self, 'proc'):
+            self.run()
+
+        if not self.outThread:
+            self.outThread = threading.Thread(target=_processOutput)
+            self.outThread.daemon = True
+            self.outThread.start()
+
+    def waitForFinish(self, timeout=None):
+        """
+        Waits until all output has been read and the process is
+        terminated.
+
+        If timeout is not None, will return after timeout seconds.
+        This timeout is only for waitForFinish and doesn't affect
+        the didTimeout or onTimeout properties.
+        """
+        if self.outThread:
+            # Thread.join() blocks the main thread until outThread is finished
+            # wake up once a second in case a keyboard interrupt is sent
+            count = 0
+            while self.outThread.isAlive():
+                self.outThread.join(timeout=1)
+                count += 1
+                if timeout and count > timeout:
+                    return
+
+        return self.proc.wait()


         ### Private methods from here on down. Thar be dragons.
@@ -762,7 +787,6 @@ class LogOutput(object):
         if self.file is not None:
             self.file.close()

-
### front end class with the default handlers

class ProcessHandler(ProcessHandlerMixin):
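The tests updated below follow the new calling convention: processOutput() starts the daemon thread that drains output, and waitForFinish() only waits. A rough sketch of the sequence (the command line and timeout values are illustrative, not part of the patch):

```python
# Rough sketch of the new mozprocess calling sequence.
from mozprocess import processhandler

p = processhandler.ProcessHandler(['python', '-c', 'print "hello"'])
p.run()                       # spawn the child process
p.processOutput(timeout=10)   # start the output-reading thread
status = p.waitForFinish()    # block until output is drained and the child exits

print status, p.didTimeout
```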
@@ -46,20 +46,20 @@ def check_for_process(processName):
         p1 = subprocess.Popen(["tasklist"], stdout=subprocess.PIPE)
         output = p1.communicate()[0]
         detected = False
-        for line in output:
+        for line in output.splitlines():
             if processName in line:
                 detected = True
                 break
     else:
-        p1 = subprocess.Popen(["ps", "-A"], stdout=subprocess.PIPE)
+        p1 = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
         p2 = subprocess.Popen(["grep", processName], stdin=p1.stdout, stdout=subprocess.PIPE)
         p1.stdout.close()
         output = p2.communicate()[0]
         detected = False
-        for line in output:
+        for line in output.splitlines():
             if "grep %s" % processName in line:
                 continue
-            elif processName in line:
+            elif processName in line and not 'defunct' in line:
                 detected = True
                 break

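The splitlines() fix matters because iterating a Python string yields single characters, not lines, so the old membership test could never match a process name. A tiny illustration (the sample output is made up):

```python
# Iterating a str gives characters; splitlines() gives whole lines.
output = "firefox 1234\nbash 5678\n"

chars = [line for line in output]               # ['f', 'i', 'r', 'e', ...]
lines = [line for line in output.splitlines()]  # ['firefox 1234', 'bash 5678']

assert not any('firefox' in c for c in chars)   # old loop: never matches
assert any('firefox' in l for l in lines)       # new loop: matches
```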
@@ -81,6 +81,7 @@ class ProcTest1(unittest.TestCase):
         p = processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
                                           cwd=here)
         p.run()
+        p.processOutput()
         p.waitForFinish()

         detected, output = check_for_process(self.proclaunch)
@@ -96,7 +97,8 @@ class ProcTest1(unittest.TestCase):
         p = processhandler.ProcessHandler([self.proclaunch, "process_waittimeout.ini"],
                                           cwd=here)
         p.run()
-        p.waitForFinish(timeout=10)
+        p.processOutput(timeout=10)
+        p.waitForFinish()

         detected, output = check_for_process(self.proclaunch)
         self.determine_status(detected,
@@ -112,6 +114,7 @@ class ProcTest1(unittest.TestCase):
         p = processhandler.ProcessHandler([self.proclaunch, "process_normal_finish.ini"],
                                           cwd=here)
         p.run()
+        p.processOutput()
         p.kill()

         detected, output = check_for_process(self.proclaunch)
@@ -139,19 +142,19 @@ class ProcTest1(unittest.TestCase):
         expectedfail -- Defaults to [], used to indicate a list of fields that are expected to fail
         """
         if 'returncode' in expectedfail:
             self.assertTrue(returncode, "Detected an expected non-zero return code")
-        else:
-            self.assertTrue(returncode, "Detected an unexpected return code of: %s" % returncode)
+        elif not isalive:
+            self.assertTrue(returncode == 0, "Detected non-zero return code of: %d" % returncode)

         if 'didtimeout' in expectedfail:
-            self.assertTrue(didtimeout, "Process timed out as expected")
+            self.assertTrue(didtimeout, "Detected that process didn't time out")
         else:
             self.assertTrue(not didtimeout, "Detected that process timed out")

-        if detected:
-            self.assertTrue(isalive, "Detected process is still running, process output: %s" % output)
+        if isalive:
+            self.assertTrue(detected, "Detected process is not running, process output: %s" % output)
         else:
-            self.assertTrue(not isalive, "Process ended")
+            self.assertTrue(not detected, "Detected process is still running, process output: %s" % output)

 if __name__ == '__main__':
     unittest.main()
@@ -51,20 +51,20 @@ def check_for_process(processName):
         p1 = subprocess.Popen(["tasklist"], stdout=subprocess.PIPE)
         output = p1.communicate()[0]
         detected = False
-        for line in output:
+        for line in output.splitlines():
             if processName in line:
                 detected = True
                 break
     else:
-        p1 = subprocess.Popen(["ps", "-A"], stdout=subprocess.PIPE)
+        p1 = subprocess.Popen(["ps", "-ef"], stdout=subprocess.PIPE)
         p2 = subprocess.Popen(["grep", processName], stdin=p1.stdout, stdout=subprocess.PIPE)
         p1.stdout.close()
         output = p2.communicate()[0]
         detected = False
-        for line in output:
+        for line in output.splitlines():
             if "grep %s" % processName in line:
                 continue
-            elif processName in line:
+            elif processName in line and not 'defunct' in line:
                 detected = True
                 break

@@ -79,14 +79,15 @@ class ProcTest2(unittest.TestCase):
         self.proclaunch = make_proclaunch(here)
         unittest.TestCase.__init__(self, *args, **kwargs)

-    def test_process_waittimeout(self):
+    def test_process_waitnotimeout(self):
         """ Process is started, runs to completion before our wait times out
         """
         p = processhandler.ProcessHandler([self.proclaunch,
                                            "process_waittimeout_10s.ini"],
                                           cwd=here)
         p.run()
-        p.waitForFinish(timeout=30)
+        p.processOutput(timeout=30)
+        p.waitForFinish()

         detected, output = check_for_process(self.proclaunch)
         self.determine_status(detected,
@@ -94,7 +95,7 @@ class ProcTest2(unittest.TestCase):
                               p.proc.returncode,
                               p.didTimeout)

-    def test_process_waitnotimeout(self):
+    def test_process_wait(self):
         """ Process is started runs to completion while we wait indefinitely
         """

@@ -110,6 +111,27 @@ class ProcTest2(unittest.TestCase):
                               p.proc.returncode,
                               p.didTimeout)

+    def test_process_waittimeout(self):
+        """
+        Process is started, then waitForFinish is called and times out.
+        Process is still running and didn't timeout
+        """
+        p = processhandler.ProcessHandler([self.proclaunch,
+                                           "process_waittimeout_10s.ini"],
+                                          cwd=here)
+
+        p.run()
+        p.processOutput()
+        p.waitForFinish(timeout=5)
+
+        detected, output = check_for_process(self.proclaunch)
+        self.determine_status(detected,
+                              output,
+                              p.proc.returncode,
+                              p.didTimeout,
+                              True,
+                              [])
+
     def determine_status(self,
                          detected=False,
                          output = '',
@@ -129,19 +151,19 @@ class ProcTest2(unittest.TestCase):
         expectedfail -- Defaults to [], used to indicate a list of fields that are expected to fail
         """
         if 'returncode' in expectedfail:
             self.assertTrue(returncode, "Detected an expected non-zero return code")
-        else:
-            self.assertTrue(returncode, "Detected an unexpected return code of: %s" % returncode)
+        elif not isalive:
+            self.assertTrue(returncode == 0, "Detected non-zero return code of: %d" % returncode)

         if 'didtimeout' in expectedfail:
-            self.assertTrue(didtimeout, "Process timed out as expected")
+            self.assertTrue(didtimeout, "Detected that process didn't time out")
         else:
             self.assertTrue(not didtimeout, "Detected that process timed out")

-        if detected:
-            self.assertTrue(isalive, "Detected process is still running, process output: %s" % output)
+        if isalive:
+            self.assertTrue(detected, "Detected process is not running, process output: %s" % output)
         else:
-            self.assertTrue(not isalive, "Process ended")
+            self.assertTrue(not detected, "Detected process is still running, process output: %s" % output)

 if __name__ == '__main__':
     unittest.main()
@@ -172,8 +172,11 @@ class AddonManager(object):
         # if the addon is a directory, install all addons in it
         addons = [path]
         if not path.endswith('.xpi') and not os.path.exists(os.path.join(path, 'install.rdf')):
-            assert os.path.isdir(path), "Addon '%s' cannot be installed" % path
-            addons = [os.path.join(path, x) for x in os.listdir(path)]
+            # If the path doesn't exist, then we don't really care, just return
+            if not os.path.isdir(path):
+                return
+            addons = [os.path.join(path, x) for x in os.listdir(path) if
+                      os.path.isdir(os.path.join(path, x))]

         # install each addon
         for addon in addons:
@@ -201,7 +204,7 @@ class AddonManager(object):
         assert addon_id, 'The addon id could not be found: %s' % addon

         # copy the addon to the profile
-        extensions_path = os.path.join(self.profile, 'extensions')
+        extensions_path = os.path.join(self.profile, 'extensions', 'staged')
         addon_path = os.path.join(extensions_path, addon_id)
         if not unpack and not addon_details['unpack'] and xpifile:
             if not os.path.exists(extensions_path):
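A standalone sketch of the new directory handling, written as a plain helper rather than the AddonManager method itself: a missing path is now silently ignored, and only subdirectories are treated as addons. The function name and example path are hypothetical.

```python
import os

def addons_in(path):
    # Mirrors the patched logic: directories of addons are filtered to
    # subdirectories only, and a non-existent path yields nothing.
    if not path.endswith('.xpi') and not os.path.exists(os.path.join(path, 'install.rdf')):
        if not os.path.isdir(path):
            return []
        return [os.path.join(path, x) for x in os.listdir(path)
                if os.path.isdir(os.path.join(path, x))]
    return [path]

print addons_in('/tmp/my-addons')  # hypothetical directory of unpacked addons
```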
@@ -128,9 +128,7 @@ class ServerLocations(object):
         if self.hasPrimary:
             raise MultiplePrimaryLocationsError()
         self.hasPrimary = True

-        for loc in self._locations:
-            if loc.isEqual(location):
-                raise DuplicateLocationError(location.url())
-
         self._locations.append(location)
         if self.add_callback and not suppress_callback:
             self.add_callback([location])
@@ -150,7 +148,7 @@ class ServerLocations(object):

        This format:
        http://mxr.mozilla.org/mozilla-central/source/build/pgo/server-locations.txt
-       The only exception is that the port, if not defined, defaults to 80.
+       The only exception is that the port, if not defined, defaults to 80 or 443.

        FIXME: Shouldn't this default to the protocol-appropriate port? Is
        there any reason to have defaults at all?
@@ -185,7 +183,11 @@ class ServerLocations(object):
                 host, port = netloc.rsplit(':', 1)
             except ValueError:
                 host = netloc
-                port = '80'
+                default_ports = {'http': '80',
+                                 'https': '443',
+                                 'ws': '443',
+                                 'wss': '443'}
+                port = default_ports.get(scheme, '80')

             try:
                 location = Location(scheme, host, port, options)
@@ -268,8 +270,8 @@ class Permissions(object):
         for (i, l) in itertools.izip(itertools.count(1), privileged):
             prefs.append(("capability.principal.codebase.p%s.granted" % i, "UniversalXPConnect"))

-            # TODO: do we need the port?
-            prefs.append(("capability.principal.codebase.p%s.id" % i, l.scheme + "://" + l.host))
+            prefs.append(("capability.principal.codebase.p%s.id" % i, "%s://%s:%s" %
+                          (l.scheme, l.host, l.port)))
             prefs.append(("capability.principal.codebase.p%s.subjectName" % i, ""))

         if proxy:
@@ -289,18 +291,19 @@ class Permissions(object):

         # We need to proxy every server but the primary one.
         origins = ["'%s'" % l.url()
-                   for l in self._locations
-                   if "primary" not in l.options]
+                   for l in self._locations]

         origins = ", ".join(origins)

         # TODO: this is not a reliable way to determine the Proxy host
         for l in self._locations:
             if "primary" in l.options:
                 webServer = l.host
-                httpPort = l.port
-                sslPort = 443
+                port = l.port

         # TODO: this should live in a template!
+        # TODO: So changing the 5th line of the regex below from (\\\\\\\\d+)
+        # to (\\\\d+) makes this code work. Not sure why there would be this
+        # difference between automation.py.in and this file.
         pacURL = """data:text/plain,
 function FindProxyForURL(url, host)
 {
@@ -309,7 +312,7 @@ function FindProxyForURL(url, host)
                 '://' +
                 '(?:[^/@]*@)?' +
                 '(.*?)' +
-                '(?::(\\\\\\\\d+))?/');
+                '(?::(\\\\d+))?/');
     var matches = regex.exec(url);
     if (!matches)
       return 'DIRECT';
@@ -330,15 +333,12 @@ function FindProxyForURL(url, host)
     var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
     if (origins.indexOf(origin) < 0)
       return 'DIRECT';
-    if (isHttp)
-      return 'PROXY %(remote)s:%(httpport)s';
-    if (isHttps || isWebSocket || isWebSocketSSL)
-      return 'PROXY %(remote)s:%(sslport)s';
+    if (isHttp || isHttps || isWebSocket || isWebSocketSSL)
+      return 'PROXY %(remote)s:%(port)s';
     return 'DIRECT';
 }""" % { "origins": origins,
          "remote": webServer,
-         "httpport":httpPort,
-         "sslport": sslPort }
+         "port": port }
         pacURL = "".join(pacURL.splitlines())

         prefs.append(("network.proxy.type", 2))
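A hedged sketch of the port-defaulting behaviour introduced above, pulled out as a standalone helper so it can be run on its own; the function name is hypothetical, and mozprofile's real parser carries more surrounding context (options, validation, Location objects).

```python
# Python 2 sketch: when a server-locations entry omits the port, fall back to
# a scheme-appropriate default instead of always assuming 80.
from urlparse import urlsplit

default_ports = {'http': '80', 'https': '443', 'ws': '443', 'wss': '443'}

def split_location(url):
    scheme, netloc = urlsplit(url)[:2]
    try:
        host, port = netloc.rsplit(':', 1)
    except ValueError:
        # no explicit port in the URL
        host = netloc
        port = default_ports.get(scheme, '80')
    return scheme, host, port

print split_location('https://example.com')     # ('https', 'example.com', '443')
print split_location('http://mochi.test:8888')  # ('http', 'mochi.test', '8888')
```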
@@ -27,7 +27,7 @@ class Profile(object):
                  addon_manifests=None, # Manifest for addons, see http://ahal.ca/blog/2011/bulk-installing-fx-addons/
                  preferences=None, # Dictionary or class of preferences
                  locations=None, # locations to proxy
-                 proxy=False, # setup a proxy
+                 proxy=None, # setup a proxy - dict of server-loc,server-port,ssl-port
                  restore=True # If true remove all installed addons preferences when cleaning up
                  ):

@@ -81,19 +81,20 @@ http://127.0.0.1:8888 privileged
         perms = Permissions(self.profile_dir, self.locations_file.name)

         prefs, user_prefs = perms.network_prefs(False)

         self.assertEqual(len(user_prefs), 0)
         self.assertEqual(len(prefs), 6)

         self.assertEqual(prefs[0], ('capability.principal.codebase.p1.granted',
                                     'UniversalXPConnect'))
         self.assertEqual(prefs[1], ('capability.principal.codebase.p1.id',
-                                    'http://mochi.test'))
+                                    'http://mochi.test:8888'))
         self.assertEqual(prefs[2], ('capability.principal.codebase.p1.subjectName', ''))

         self.assertEqual(prefs[3], ('capability.principal.codebase.p2.granted',
                                     'UniversalXPConnect'))
         self.assertEqual(prefs[4], ('capability.principal.codebase.p2.id',
-                                    'http://127.0.0.1'))
+                                    'http://127.0.0.1:8888'))
         self.assertEqual(prefs[5], ('capability.principal.codebase.p2.subjectName', ''))


@@ -102,10 +103,10 @@ http://127.0.0.1:8888 privileged
         self.assertEqual(user_prefs[0], ('network.proxy.type', 2))
         self.assertEqual(user_prefs[1][0], 'network.proxy.autoconfig_url')

-        origins_decl = "var origins = ['http://127.0.0.1:80', 'http://127.0.0.1:8888'];"
+        origins_decl = "var origins = ['http://mochi.test:8888', 'http://127.0.0.1:80', 'http://127.0.0.1:8888'];"
         self.assertTrue(origins_decl in user_prefs[1][1])

-        proxy_check = "if (isHttp) return 'PROXY mochi.test:8888'; if (isHttps || isWebSocket || isWebSocketSSL) return 'PROXY mochi.test:443';"
+        proxy_check = "if (isHttp || isHttps || isWebSocket || isWebSocketSSL) return 'PROXY mochi.test:8888';"
         self.assertTrue(proxy_check in user_prefs[1][1])


@@ -82,8 +82,11 @@ http://example.org:80 privileged
         self.assertRaises(MultiplePrimaryLocationsError, locations.add_host,
                           'primary.test', options='primary')

-        self.assertRaises(DuplicateLocationError, locations.add_host,
-                          '127.0.0.1')
+        # We no longer throw these DuplicateLocation Error
+        try:
+            locations.add_host('127.0.0.1')
+        except DuplicateLocationError:
+            self.assertTrue(False, "Should no longer throw DuplicateLocationError")

         self.assertRaises(BadPortLocationError, locations.add_host, '127.0.0.1',
                           port='abc')
@@ -14,7 +14,6 @@ import subprocess
 import sys
 import ConfigParser

-from threading import Thread
 from utils import get_metadata_from_egg
 from utils import findInPath
 from mozprofile import *
@@ -151,10 +150,14 @@ class Runner(object):
     def is_running(self):
         return self.process_handler is not None

-    def start(self, debug_args=None, interactive=False):
+    def start(self, debug_args=None, interactive=False, timeout=None, outputTimeout=None):
         """
         Run self.command in the proper environment.
         - debug_args: arguments for the debugger
         - interactive: uses subprocess.Popen directly
         - read_output: sends program output to stdout [default=False]
+        - timeout: see process_handler.waitForFinish
+        - outputTimeout: see process_handler.waitForFinish
         """

         # ensure you are stopped
@@ -171,7 +174,6 @@ class Runner(object):
         if debug_args:
             cmd = list(debug_args) + cmd

-        #
         if interactive:
             self.process_handler = subprocess.Popen(cmd, env=self.env)
             # TODO: other arguments
@@ -180,18 +182,26 @@ class Runner(object):
             self.process_handler = self.process_class(cmd, env=self.env, **self.kp_kwargs)
             self.process_handler.run()

-            # Spin a thread to handle reading the output
-            self.outThread = OutputThread(self.process_handler)
-            self.outThread.start()
+            # start processing output from the process
+            self.process_handler.processOutput(timeout, outputTimeout)

-    def wait(self, timeout=None, outputTimeout=None):
-        """Wait for the app to exit."""
+    def wait(self, timeout=None):
+        """
+        Wait for the app to exit.
+
+        If timeout is not None, will return after timeout seconds.
+        Use is_running() to determine whether or not a timeout occurred.
+        Timeout is ignored if interactive was set to True.
+        """
         if self.process_handler is None:
             return
         if isinstance(self.process_handler, subprocess.Popen):
             self.process_handler.wait()
         else:
-            self.process_handler.waitForFinish(timeout=timeout, outputTimeout=outputTimeout)
+            self.process_handler.waitForFinish(timeout)
+            if not getattr(self.process_handler.proc, 'returncode', False):
+                # waitForFinish timed out
+                return
         self.process_handler = None

     def stop(self):
@@ -247,13 +257,6 @@ class ThunderbirdRunner(Runner):
 runners = {'firefox': FirefoxRunner,
            'thunderbird': ThunderbirdRunner}

-class OutputThread(Thread):
-    def __init__(self, prochandler):
-        Thread.__init__(self)
-        self.ph = prochandler
-    def run(self):
-        self.ph.waitForFinish()
-
 class CLI(MozProfileCLI):
     """Command line interface."""

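Under the reworked mozrunner API, a caller moves its timeouts to start() (which now kicks off output processing) and keeps wait() simple. A rough usage sketch, assuming the usual FirefoxRunner constructor arguments; the binary path, profile, and timeout values are illustrative.

```python
from mozprofile import FirefoxProfile
from mozrunner import FirefoxRunner

runner = FirefoxRunner(binary='/path/to/firefox', profile=FirefoxProfile())
runner.start(outputTimeout=300)   # kill the app if it produces no output for 5 minutes
runner.wait(timeout=600)          # stop waiting after 10 minutes
if runner.is_running():           # still True here means the wait timed out
    runner.stop()
```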
@@ -8,6 +8,7 @@
 # run with
 # https://github.com/mozilla/mozbase/blob/master/test.py

 [include:manifestdestiny/tests/manifest.ini]
+[include:mozprocess/tests/manifest.ini]
 [include:mozprofile/tests/manifest.ini]
 [include:mozhttpd/tests/manifest.ini]
@@ -1,5 +1 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
 recursive-include peptest/extension *
@@ -3,7 +3,6 @@
    - License, v. 2.0. If a copy of the MPL was not distributed with this
    - file, You can obtain one at http://mozilla.org/MPL/2.0/. -->

-
 <!--
    build.xml adapted from Shawn Wilsher's rtse
    (http://shawnwilsher.com/extensions/rtse/)
@@ -5,5 +5,5 @@
 These folders are pulled from:
 https://github.com/mozilla/mozmill/tree/master/mozmill/mozmill/extension/resource

-To update them, simply checkout the mozmill repo at https://github.com/mozautomation/mozmill,
+To update them, simply checkout the mozmill repo at https://github.com/mozilla/mozmill,
 then copy and paste the 'driver' and 'stdlib' folders to this location.
@@ -62,24 +62,22 @@ class PepProcess(ProcessHandler):
                 else:
                     threshold = 0.0

-                msg = results.currentTest \
-                      + ' | fail threshold: ' + str(threshold)
+                msg = '%s | fail threshold: %s' % (results.currentTest, threshold)
                 if metric > threshold:
-                    msg += ' < metric: ' + str(metric)
+                    msg += ' < metric: %s' % metric
                     self.logger.testFail(msg)
                 else:
-                    msg += ' >= metric: ' + str(metric)
+                    msg += ' >= metric: %s' % metric
                     self.logger.testPass(msg)

-            self.logger.testEnd(
-                    results.currentTest +
-                    ' | finished in: ' + tokens[3].rstrip() + ' ms')
+            self.logger.testEnd('%s | finished in: %s ms' %
+                                (results.currentTest, tokens[3].rstrip()))
             results.currentTest = None
         elif level == 'ACTION-START':
             results.currentAction = tokens[3].rstrip()
-            self.logger.debug(level + ' | ' + results.currentAction)
+            self.logger.debug('%s | %s' % (level, results.currentAction))
         elif level == 'ACTION-END':
-            self.logger.debug(level + ' | ' + results.currentAction)
+            self.logger.debug('%s | %s' % (level, results.currentAction))
             results.currentAction = None
         elif level in ['DEBUG', 'INFO', 'WARNING', 'ERROR']:
             line = line[len('PEP ' + level)+1:]
@@ -93,7 +91,6 @@ class PepProcess(ProcessHandler):
             # The output is generated from EventTracer
             # Format is 'MOZ_EVENT_TRACE sample <TIMESTAMP> <VALUE>
             # <VALUE> is the unresponsive time in ms
-            self.logger.warning(
-                    results.currentTest + ' | ' + results.currentAction +
-                    ' | unresponsive time: ' + tokens[3].rstrip() + ' ms')
+            self.logger.warning('%s | %s | unresponsive time: %s ms' %
+                                (results.currentTest, results.currentAction, tokens[3].rstrip()))
             results.fails[results.currentTest].append(int(tokens[3].rstrip()))
@@ -49,7 +49,8 @@ class Peptest():
             self.logger.warning('Can\'t set up proxy without server path')
         else:
             enable_proxy = True
-            locations.read(self.options.proxyLocations, False)
+            for proxyLocation in self.options.proxyLocations:
+                locations.read(proxyLocation, False)
             locations.add_host(host='127.0.0.1',
                                port=self.options.serverPort,
                                options='primary,privileged')
@@ -113,8 +114,8 @@ class Peptest():
         self.logger.debug('Starting Peptest')

         # start firefox
-        self.runner.start()
-        self.runner.wait(outputTimeout=self.options.timeout)
+        self.runner.start(outputTimeout=self.options.timeout)
+        self.runner.wait()
         crashed = self.checkForCrashes(results.currentTest)
         self.stop()

@@ -293,11 +294,11 @@ class PeptestOptions(OptionParser):
                         "If none specified, a temporary profile is created")

         self.add_option("--proxy",
-                        action="store", type="string", dest="proxyLocations",
+                        action="append", type="string", dest="proxyLocations",
                         default=None,
-                        help="path to a server-location file specifying "
-                             "domains to proxy. --server-path must also be "
-                             "specified.")
+                        help="a list of paths to server-location files specifying "
+                             "domains to proxy (set with multiple --proxy arguments). "
+                             "--server-path must also be specified.")

         self.add_option("--proxy-host-dirs",
                         action="store_true", dest="proxyHostDirs",
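Switching --proxy to action="append" is what allows the option to be repeated, with each occurrence adding another server-locations file to options.proxyLocations. A minimal optparse sketch (the file names are illustrative):

```python
from optparse import OptionParser

parser = OptionParser()
parser.add_option("--proxy", action="append", type="string",
                  dest="proxyLocations", default=None)

# Each --proxy flag appends to the same list.
options, args = parser.parse_args(
    ["--proxy", "locations-a.txt", "--proxy", "locations-b.txt"])
print options.proxyLocations   # ['locations-a.txt', 'locations-b.txt']
```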