gecko-dev/build/automation.py.in

#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import with_statement
import codecs
from datetime import datetime, timedelta
import itertools
import logging
import os
import re
import select
import shutil
import signal
import subprocess
import sys
import threading
import tempfile
import sqlite3
from string import Template
SCRIPT_DIR = os.path.abspath(os.path.realpath(os.path.dirname(sys.argv[0])))
sys.path.insert(0, SCRIPT_DIR)
import automationutils
_DEFAULT_WEB_SERVER = "127.0.0.1"
_DEFAULT_HTTP_PORT = 8888
_DEFAULT_SSL_PORT = 4443
_DEFAULT_WEBSOCKET_PORT = 9988
# from nsIPrincipal.idl
_APP_STATUS_NOT_INSTALLED = 0
_APP_STATUS_INSTALLED = 1
_APP_STATUS_PRIVILEGED = 2
_APP_STATUS_CERTIFIED = 3
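# The #expand/#ifdef lines below are directives for the Mozilla build
# preprocessor: this .in file is expanded at build time, replacing the
# __DOUBLE_UNDERSCORE__ placeholders with values from the build configuration.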
#expand _DIST_BIN = __XPC_BIN_PATH__
#expand _IS_WIN32 = len("__WIN32__") != 0
#expand _IS_MAC = __IS_MAC__ != 0
#expand _IS_LINUX = __IS_LINUX__ != 0
#ifdef IS_CYGWIN
#expand _IS_CYGWIN = __IS_CYGWIN__ == 1
#else
_IS_CYGWIN = False
#endif
#expand _IS_CAMINO = __IS_CAMINO__ != 0
#expand _BIN_SUFFIX = __BIN_SUFFIX__
#expand _PERL = __PERL__
#expand _DEFAULT_APP = "./" + __BROWSER_PATH__
#expand _CERTS_SRC_DIR = __CERTS_SRC_DIR__
#expand _IS_TEST_BUILD = __IS_TEST_BUILD__
#expand _IS_DEBUG_BUILD = __IS_DEBUG_BUILD__
#expand _CRASHREPORTER = __CRASHREPORTER__ == 1
#expand _IS_ASAN = __IS_ASAN__ == 1
if _IS_WIN32:
import ctypes, ctypes.wintypes, time, msvcrt
else:
import errno
# We use the logging system here primarily because it'll handle multiple
# threads, which is needed to process the output of the server and application
# processes simultaneously.
_log = logging.getLogger()
handler = logging.StreamHandler(sys.stdout)
_log.setLevel(logging.INFO)
_log.addHandler(handler)
#################
# PROFILE SETUP #
#################
class SyntaxError(Exception):
"Signifies a syntax error on a particular line in server-locations.txt."
def __init__(self, lineno, msg = None):
self.lineno = lineno
self.msg = msg
def __str__(self):
s = "Syntax error on line " + str(self.lineno)
if self.msg:
s += ": %s." % self.msg
else:
s += "."
return s
class Location:
"Represents a location line in server-locations.txt."
def __init__(self, scheme, host, port, options):
self.scheme = scheme
self.host = host
self.port = port
self.options = options
class Automation(object):
"""
Runs the browser from a script, and provides useful utilities
for setting up the browser environment.
"""
DIST_BIN = _DIST_BIN
IS_WIN32 = _IS_WIN32
IS_MAC = _IS_MAC
IS_LINUX = _IS_LINUX
IS_CYGWIN = _IS_CYGWIN
IS_CAMINO = _IS_CAMINO
BIN_SUFFIX = _BIN_SUFFIX
PERL = _PERL
UNIXISH = not IS_WIN32 and not IS_MAC
DEFAULT_APP = _DEFAULT_APP
CERTS_SRC_DIR = _CERTS_SRC_DIR
IS_TEST_BUILD = _IS_TEST_BUILD
IS_DEBUG_BUILD = _IS_DEBUG_BUILD
CRASHREPORTER = _CRASHREPORTER
IS_ASAN = _IS_ASAN
# timeout, in seconds
DEFAULT_TIMEOUT = 60.0
DEFAULT_WEB_SERVER = _DEFAULT_WEB_SERVER
DEFAULT_HTTP_PORT = _DEFAULT_HTTP_PORT
DEFAULT_SSL_PORT = _DEFAULT_SSL_PORT
DEFAULT_WEBSOCKET_PORT = _DEFAULT_WEBSOCKET_PORT
def __init__(self):
self.log = _log
self.lastTestSeen = "automation.py"
self.haveDumpedScreen = False
def setServerInfo(self,
webServer = _DEFAULT_WEB_SERVER,
httpPort = _DEFAULT_HTTP_PORT,
sslPort = _DEFAULT_SSL_PORT,
webSocketPort = _DEFAULT_WEBSOCKET_PORT):
self.webServer = webServer
self.httpPort = httpPort
self.sslPort = sslPort
self.webSocketPort = webSocketPort
@property
def __all__(self):
return [
"UNIXISH",
"IS_WIN32",
"IS_MAC",
"log",
"runApp",
"Process",
"addCommonOptions",
"initializeProfile",
"DIST_BIN",
"DEFAULT_APP",
"CERTS_SRC_DIR",
"environment",
"IS_TEST_BUILD",
"IS_DEBUG_BUILD",
"DEFAULT_TIMEOUT",
]
class Process(subprocess.Popen):
"""
Represents our view of a subprocess.
It adds a kill() method which allows it to be stopped explicitly.
"""
def __init__(self,
args,
bufsize=0,
executable=None,
stdin=None,
stdout=None,
stderr=None,
preexec_fn=None,
close_fds=False,
shell=False,
cwd=None,
env=None,
universal_newlines=False,
startupinfo=None,
creationflags=0):
args = automationutils.wrapCommand(args)
print "args: %s" % args
subprocess.Popen.__init__(self, args, bufsize, executable,
stdin, stdout, stderr,
preexec_fn, close_fds,
shell, cwd, env,
universal_newlines, startupinfo, creationflags)
self.log = _log
def kill(self):
if Automation().IS_WIN32:
import platform
pid = "%i" % self.pid
if platform.release() == "2000":
# Windows 2000 needs 'kill.exe' from the
#'Windows 2000 Resource Kit tools'. (See bug 475455.)
try:
subprocess.Popen(["kill", "-f", pid]).wait()
except:
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Missing 'kill' utility to kill process with pid=%s. Kill it manually!", pid)
else:
# Windows XP and later.
subprocess.Popen(["taskkill", "/F", "/PID", pid]).wait()
else:
os.kill(self.pid, signal.SIGKILL)
def readLocations(self, locationsPath = "server-locations.txt"):
"""
Reads the locations at which the Mochitest HTTP server is available from
server-locations.txt.
"""
locationFile = codecs.open(locationsPath, "r", "UTF-8")
# Perhaps more detail than necessary, but it's the easiest way to make sure
# we get exactly the format we want. See server-locations.txt for the exact
# format guaranteed here.
lineRe = re.compile(r"^(?P<scheme>[a-z][-a-z0-9+.]*)"
r"://"
r"(?P<host>"
r"\d+\.\d+\.\d+\.\d+"
r"|"
r"(?:[a-z0-9](?:[-a-z0-9]*[a-z0-9])?\.)*"
r"[a-z](?:[-a-z0-9]*[a-z0-9])?"
r")"
r":"
r"(?P<port>\d+)"
r"(?:"
r"\s+"
r"(?P<options>\S+(?:,\S+)*)"
r")?$")
locations = []
lineno = 0
seenPrimary = False
for line in locationFile:
lineno += 1
if line.startswith("#") or line == "\n":
continue
match = lineRe.match(line)
if not match:
raise SyntaxError(lineno)
options = match.group("options")
if options:
options = options.split(",")
if "primary" in options:
if seenPrimary:
raise SyntaxError(lineno, "multiple primary locations")
seenPrimary = True
else:
options = []
locations.append(Location(match.group("scheme"), match.group("host"),
match.group("port"), options))
if not seenPrimary:
raise SyntaxError(lineno + 1, "missing primary location")
return locations
def setupPermissionsDatabase(self, profileDir, permissions):
# Open database and create table
permDB = sqlite3.connect(os.path.join(profileDir, "permissions.sqlite"))
cursor = permDB.cursor();
cursor.execute("PRAGMA user_version=3");
# SQL copied from nsPermissionManager.cpp
cursor.execute("""CREATE TABLE IF NOT EXISTS moz_hosts (
id INTEGER PRIMARY KEY,
host TEXT,
type TEXT,
permission INTEGER,
expireType INTEGER,
expireTime INTEGER,
appId INTEGER,
isInBrowserElement INTEGER)""")
# Insert desired permissions
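    # Permission values follow nsIPermissionManager: 1 == ALLOW_ACTION, 2 == DENY_ACTION.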
for perm in permissions.keys():
for host,allow in permissions[perm]:
cursor.execute("INSERT INTO moz_hosts values(NULL, ?, ?, ?, 0, 0, 0, 0)",
(host, perm, 1 if allow else 2))
# Commit and close
permDB.commit()
cursor.close()
def setupTestApps(self, profileDir, apps):
webappJSONTemplate = Template(""""$id": {
"origin": "$origin",
"installOrigin": "$origin",
"receipt": null,
"installTime": 132333986000,
"manifestURL": "$manifestURL",
"localId": $localId,
"id": "$id",
"appStatus": $appStatus,
"csp": "$csp"
}""")
manifestTemplate = Template("""{
"name": "$name",
"csp": "$csp",
"description": "$description",
"launch_path": "/",
"developer": {
"name": "Mozilla",
"url": "https://mozilla.org/"
},
"permissions": [
],
"locales": {
"en-US": {
"name": "$name",
"description": "$description"
}
},
"default_locale": "en-US",
"icons": {
}
}
""")
# Create webapps/webapps.json
webappsDir = os.path.join(profileDir, "webapps")
if not os.access(webappsDir, os.F_OK):
os.mkdir(webappsDir)
lineRe = re.compile(r'(.*?)"(.*?)": (.*)')
webappsJSONFilename = os.path.join(webappsDir, "webapps.json")
webappsJSON = []
if os.access(webappsJSONFilename, os.F_OK):
      # If there is an existing webapps.json file (which will be the case for
      # b2g), we parse the data in the existing file before appending our test
      # apps to it.
startId = 1
webappsJSONFile = open(webappsJSONFilename, "r")
contents = webappsJSONFile.read()
for app_content in contents.split('},'):
app = {}
        # minimal hand-rolled JSON parser, needed because json/simplejson isn't
        # available on the test slaves
for line in app_content.split('\n'):
m = lineRe.match(line)
if m:
value = m.groups()[2]
# remove any trailing commas
if value[-1:] == ',':
value = value[:-1]
# set the app name from a line that looks like this:
# "name.gaiamobile.org": {
if value == '{':
app['id'] = m.groups()[1]
# parse string, None, bool and int types
elif value[0:1] == '"':
app[m.groups()[1]] = value[1:-1]
elif value == "null":
app[m.groups()[1]] = None
elif value == "true":
app[m.groups()[1]] = True
elif value == "false":
app[m.groups()[1]] = False
else:
app[m.groups()[1]] = int(value)
if app:
webappsJSON.append(app)
webappsJSONFile.close()
startId = 1
for app in webappsJSON:
if app['localId'] >= startId:
startId = app['localId'] + 1
if not app.get('csp'):
app['csp'] = ''
if not app.get('appStatus'):
app['appStatus'] = 3
for localId, app in enumerate(apps):
app['localId'] = localId + startId # localId must be from 1..N
if not app.get('id'):
app['id'] = app['name']
webappsJSON.append(app)
contents = []
for app in webappsJSON:
contents.append(webappJSONTemplate.substitute(app))
contents = '{\n' + ',\n'.join(contents) + '\n}\n'
webappsJSONFile = open(webappsJSONFilename, "w")
webappsJSONFile.write(contents)
webappsJSONFile.close()
# Create manifest file for each app.
for app in apps:
manifest = manifestTemplate.substitute(app)
manifestDir = os.path.join(webappsDir, app['name'])
os.mkdir(manifestDir)
manifestFile = open(os.path.join(manifestDir, "manifest.webapp"), "a")
manifestFile.write(manifest)
manifestFile.close()
def initializeProfile(self, profileDir, extraPrefs=[],
useServerLocations=False,
initialProfile=None):
" Sets up the standard testing profile."
prefs = []
# Start with a clean slate.
shutil.rmtree(profileDir, True)
if initialProfile:
shutil.copytree(initialProfile, profileDir)
else:
os.mkdir(profileDir)
# Set up permissions database
locations = self.readLocations()
self.setupPermissionsDatabase(profileDir,
{'allowXULXBL':[(l.host, 'noxul' not in l.options) for l in locations]});
part = """\
user_pref("social.skipLoadingProviders", true);
user_pref("browser.console.showInPanel", true);
user_pref("browser.dom.window.dump.enabled", true);
user_pref("browser.firstrun.show.localepicker", false);
user_pref("browser.firstrun.show.uidiscovery", false);
user_pref("browser.startup.page", 0); // use about:blank, not browser.startup.homepage
user_pref("browser.ui.layout.tablet", 0); // force tablet UI off
user_pref("dom.allow_scripts_to_close_windows", true);
user_pref("dom.disable_open_during_load", false);
user_pref("dom.max_script_run_time", 0); // no slow script dialogs
user_pref("hangmonitor.timeout", 0); // no hang monitor
user_pref("dom.max_chrome_script_run_time", 0);
user_pref("dom.popup_maximum", -1);
user_pref("dom.send_after_paint_to_content", true);
user_pref("dom.successive_dialog_time_limit", 0);
user_pref("signed.applets.codebase_principal_support", true);
user_pref("browser.shell.checkDefaultBrowser", false);
user_pref("shell.checkDefaultClient", false);
user_pref("browser.warnOnQuit", false);
user_pref("accessibility.typeaheadfind.autostart", false);
user_pref("javascript.options.showInConsole", true);
user_pref("devtools.errorconsole.enabled", true);
user_pref("layout.debug.enable_data_xbl", true);
user_pref("browser.EULA.override", true);
user_pref("javascript.options.jit_hardening", true);
user_pref("gfx.color_management.force_srgb", true);
user_pref("network.manage-offline-status", false);
user_pref("dom.min_background_timeout_value", 1000);
user_pref("test.mousescroll", true);
user_pref("security.default_personal_cert", "Select Automatically"); // Need to client auth test be w/o any dialogs
user_pref("network.http.prompt-temp-redirect", false);
user_pref("media.cache_size", 100);
user_pref("security.warn_viewing_mixed", false);
user_pref("app.update.enabled", false);
user_pref("app.update.staging.enabled", false);
user_pref("browser.panorama.experienced_first_run", true); // Assume experienced
user_pref("dom.w3c_touch_events.enabled", 1);
user_pref("dom.undo_manager.enabled", true);
// Set a future policy version to avoid the telemetry prompt.
user_pref("toolkit.telemetry.prompted", 999);
user_pref("toolkit.telemetry.notifiedOptOut", 999);
// Existing tests assume there is no font size inflation.
user_pref("font.size.inflation.emPerLine", 0);
user_pref("font.size.inflation.minTwips", 0);
// Only load extensions from the application and user profile
// AddonManager.SCOPE_PROFILE + AddonManager.SCOPE_APPLICATION
user_pref("extensions.enabledScopes", 5);
// Disable metadata caching for installed add-ons by default
user_pref("extensions.getAddons.cache.enabled", false);
// Disable installing any distribution add-ons
user_pref("extensions.installDistroAddons", false);
user_pref("extensions.testpilot.runStudies", false);
user_pref("extensions.testpilot.alreadyCustomizedToolbar", true);
user_pref("geo.wifi.uri", "http://%(server)s/tests/dom/tests/mochitest/geolocation/network_geolocation.sjs");
user_pref("geo.wifi.testing", true);
user_pref("geo.ignore.location_filter", true);
user_pref("camino.warn_when_closing", false); // Camino-only, harmless to others
// Make url-classifier updates so rare that they won't affect tests
user_pref("urlclassifier.updateinterval", 172800);
// Point the url-classifier to the local testing server for fast failures
user_pref("browser.safebrowsing.gethashURL", "http://%(server)s/safebrowsing-dummy/gethash");
user_pref("browser.safebrowsing.keyURL", "http://%(server)s/safebrowsing-dummy/newkey");
user_pref("browser.safebrowsing.updateURL", "http://%(server)s/safebrowsing-dummy/update");
// Point update checks to the local testing server for fast failures
user_pref("extensions.update.url", "http://%(server)s/extensions-dummy/updateURL");
user_pref("extensions.update.background.url", "http://%(server)s/extensions-dummy/updateBackgroundURL");
user_pref("extensions.blocklist.url", "http://%(server)s/extensions-dummy/blocklistURL");
user_pref("extensions.hotfix.url", "http://%(server)s/extensions-dummy/hotfixURL");
// Turn off extension updates so they don't bother tests
user_pref("extensions.update.enabled", false);
// Make sure opening about:addons won't hit the network
user_pref("extensions.webservice.discoverURL", "http://%(server)s/extensions-dummy/discoveryURL");
// Make sure AddonRepository won't hit the network
user_pref("extensions.getAddons.maxResults", 0);
user_pref("extensions.getAddons.get.url", "http://%(server)s/extensions-dummy/repositoryGetURL");
user_pref("extensions.getAddons.getWithPerformance.url", "http://%(server)s/extensions-dummy/repositoryGetWithPerformanceURL");
user_pref("extensions.getAddons.search.browseURL", "http://%(server)s/extensions-dummy/repositoryBrowseURL");
user_pref("extensions.getAddons.search.url", "http://%(server)s/extensions-dummy/repositorySearchURL");
// Make sure that opening the plugins check page won't hit the network
user_pref("plugins.update.url", "http://%(server)s/plugins-dummy/updateCheckURL");
// Make enablePrivilege continue to work for test code. :-(
user_pref("security.turn_off_all_security_so_that_viruses_can_take_over_this_computer", true);
// Get network events.
user_pref("network.activity.blipIntervalMilliseconds", 250);
// Don't allow the Data Reporting service to prompt for policy acceptance.
user_pref("datareporting.policy.dataSubmissionPolicyBypassAcceptance", true);
// Make sure CSS error reporting is enabled for tests
user_pref("layout.css.report_errors", true);
""" % { "server" : self.webServer + ":" + str(self.httpPort) }
prefs.append(part)
if useServerLocations:
# We need to proxy every server but the primary one.
origins = ["'%s://%s:%s'" % (l.scheme, l.host, l.port)
for l in filter(lambda l: "primary" not in l.options, locations)]
origins = ", ".join(origins)
pacURL = """data:text/plain,
function FindProxyForURL(url, host)
{
var origins = [%(origins)s];
var regex = new RegExp('^([a-z][-a-z0-9+.]*)' +
'://' +
'(?:[^/@]*@)?' +
'(.*?)' +
'(?::(\\\\\\\\d+))?/');
var matches = regex.exec(url);
if (!matches)
return 'DIRECT';
var isHttp = matches[1] == 'http';
var isHttps = matches[1] == 'https';
var isWebSocket = matches[1] == 'ws';
var isWebSocketSSL = matches[1] == 'wss';
if (!matches[3])
{
if (isHttp | isWebSocket) matches[3] = '80';
if (isHttps | isWebSocketSSL) matches[3] = '443';
}
if (isWebSocket)
matches[1] = 'http';
if (isWebSocketSSL)
matches[1] = 'https';
var origin = matches[1] + '://' + matches[2] + ':' + matches[3];
if (origins.indexOf(origin) < 0)
return 'DIRECT';
if (isHttp)
return 'PROXY %(remote)s:%(httpport)s';
if (isHttps || isWebSocket || isWebSocketSSL)
return 'PROXY %(remote)s:%(sslport)s';
return 'DIRECT';
}""" % { "origins": origins,
"remote": self.webServer,
"httpport":self.httpPort,
"sslport": self.sslPort }
pacURL = "".join(pacURL.splitlines())
part += """
user_pref("network.proxy.type", 2);
user_pref("network.proxy.autoconfig_url", "%(pacURL)s");
user_pref("camino.use_system_proxy_settings", false); // Camino-only, harmless to others
""" % {"pacURL": pacURL}
prefs.append(part)
else:
part = 'user_pref("network.proxy.type", 0);\n'
prefs.append(part)
for v in extraPrefs:
thispref = v.split("=", 1)
if len(thispref) < 2:
print "Error: syntax error in --setpref=" + v
sys.exit(1)
part = 'user_pref("%s", %s);\n' % (thispref[0], thispref[1])
prefs.append(part)
# write the preferences
prefsFile = open(profileDir + "/" + "user.js", "a")
prefsFile.write("".join(prefs))
prefsFile.close()
apps = [
{
'name': 'http_example_org',
'csp': '',
'origin': 'http://example.org',
'manifestURL': 'http://example.org/manifest.webapp',
'description': 'http://example.org App',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'https_example_com',
'csp': '',
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest.webapp',
'description': 'https://example.com App',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'http_test1_example_org',
'csp': '',
'origin': 'http://test1.example.org',
'manifestURL': 'http://test1.example.org/manifest.webapp',
'description': 'http://test1.example.org App',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'http_test1_example_org_8000',
'csp': '',
'origin': 'http://test1.example.org:8000',
'manifestURL': 'http://test1.example.org:8000/manifest.webapp',
'description': 'http://test1.example.org:8000 App',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'http_sub1_test1_example_org',
'csp': '',
'origin': 'http://sub1.test1.example.org',
'manifestURL': 'http://sub1.test1.example.org/manifest.webapp',
'description': 'http://sub1.test1.example.org App',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'https_example_com_privileged',
'csp': '',
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest_priv.webapp',
'description': 'https://example.com Privileged App',
'appStatus': _APP_STATUS_PRIVILEGED
},
{
'name': 'https_example_com_certified',
'csp': '',
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest_cert.webapp',
'description': 'https://example.com Certified App',
'appStatus': _APP_STATUS_CERTIFIED
},
{
'name': 'https_example_csp_certified',
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest_csp_cert.webapp',
'description': 'https://example.com Certified App with manifest policy',
'appStatus': _APP_STATUS_CERTIFIED
},
{
'name': 'https_example_csp_installed',
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest_csp_inst.webapp',
'description': 'https://example.com Installed App with manifest policy',
'appStatus': _APP_STATUS_INSTALLED
},
{
'name': 'https_example_csp_privileged',
'csp': "default-src *; script-src 'self'; object-src 'none'; style-src 'self' 'unsafe-inline'",
'origin': 'https://example.com',
'manifestURL': 'https://example.com/manifest_csp_priv.webapp',
'description': 'https://example.com Privileged App with manifest policy',
'appStatus': _APP_STATUS_PRIVILEGED
},
{
'name': 'https_a_domain_certified',
'csp': "",
'origin': 'https://acertified.com',
'manifestURL': 'https://acertified.com/manifest.webapp',
'description': 'https://acertified.com Certified App',
'appStatus': _APP_STATUS_CERTIFIED
},
{
'name': 'https_a_domain_privileged',
'csp': "",
'origin': 'https://aprivileged.com',
'manifestURL': 'https://aprivileged.com/manifest.webapp',
'description': 'https://aprivileged.com Privileged App ',
'appStatus': _APP_STATUS_PRIVILEGED
},
];
self.setupTestApps(profileDir, apps)
def addCommonOptions(self, parser):
"Adds command-line options which are common to mochitest and reftest."
parser.add_option("--setpref",
action = "append", type = "string",
default = [],
dest = "extraPrefs", metavar = "PREF=VALUE",
help = "defines an extra user preference")
def fillCertificateDB(self, profileDir, certPath, utilityPath, xrePath):
pwfilePath = os.path.join(profileDir, ".crtdbpw")
pwfile = open(pwfilePath, "w")
pwfile.write("\n")
pwfile.close()
# Create head of the ssltunnel configuration file
sslTunnelConfigPath = os.path.join(profileDir, "ssltunnel.cfg")
sslTunnelConfig = open(sslTunnelConfigPath, "w")
sslTunnelConfig.write("httpproxy:1\n")
sslTunnelConfig.write("certdbdir:%s\n" % certPath)
sslTunnelConfig.write("forward:127.0.0.1:%s\n" % self.httpPort)
sslTunnelConfig.write("websocketserver:%s:%s\n" % (self.webServer, self.webSocketPort))
sslTunnelConfig.write("listen:*:%s:pgo server certificate\n" % self.sslPort)
# Configure automatic certificate and bind custom certificates, client authentication
locations = self.readLocations()
locations.pop(0)
for loc in locations:
if loc.scheme == "https" and "nocert" not in loc.options:
customCertRE = re.compile("^cert=(?P<nickname>[0-9a-zA-Z_ ]+)")
clientAuthRE = re.compile("^clientauth=(?P<clientauth>[a-z]+)")
redirRE = re.compile("^redir=(?P<redirhost>[0-9a-zA-Z_ .]+)")
for option in loc.options:
match = customCertRE.match(option)
if match:
customcert = match.group("nickname");
sslTunnelConfig.write("listen:%s:%s:%s:%s\n" %
(loc.host, loc.port, self.sslPort, customcert))
match = clientAuthRE.match(option)
if match:
clientauth = match.group("clientauth");
sslTunnelConfig.write("clientauth:%s:%s:%s:%s\n" %
(loc.host, loc.port, self.sslPort, clientauth))
match = redirRE.match(option)
if match:
redirhost = match.group("redirhost")
sslTunnelConfig.write("redirhost:%s:%s:%s:%s\n" %
(loc.host, loc.port, self.sslPort, redirhost))
sslTunnelConfig.close()
# Pre-create the certification database for the profile
env = self.environment(xrePath = xrePath)
certutil = os.path.join(utilityPath, "certutil" + self.BIN_SUFFIX)
pk12util = os.path.join(utilityPath, "pk12util" + self.BIN_SUFFIX)
status = self.Process([certutil, "-N", "-d", profileDir, "-f", pwfilePath], env = env).wait()
if status != 0:
return status
# Walk the cert directory and add custom CAs and client certs
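    # certutil's -t argument takes three comma-separated trust-flag groups
    # (SSL, email, object signing): ".ca" files are imported as CAs trusted for
    # SSL, and CAs whose name ends in "-object" also get object-signing trust.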
files = os.listdir(certPath)
for item in files:
root, ext = os.path.splitext(item)
if ext == ".ca":
trustBits = "CT,,"
if root.endswith("-object"):
trustBits = "CT,,CT"
self.Process([certutil, "-A", "-i", os.path.join(certPath, item),
"-d", profileDir, "-f", pwfilePath, "-n", root, "-t", trustBits],
env = env).wait()
if ext == ".client":
self.Process([pk12util, "-i", os.path.join(certPath, item), "-w",
pwfilePath, "-d", profileDir],
env = env).wait()
os.unlink(pwfilePath)
return 0
def environment(self, env = None, xrePath = None, crashreporter = True):
if xrePath == None:
xrePath = self.DIST_BIN
if env == None:
env = dict(os.environ)
ldLibraryPath = os.path.abspath(os.path.join(SCRIPT_DIR, xrePath))
if self.UNIXISH or self.IS_MAC:
envVar = "LD_LIBRARY_PATH"
if self.IS_MAC:
envVar = "DYLD_LIBRARY_PATH"
else: # unixish
env['MOZILLA_FIVE_HOME'] = xrePath
if envVar in env:
ldLibraryPath = ldLibraryPath + ":" + env[envVar]
env[envVar] = ldLibraryPath
elif self.IS_WIN32:
env["PATH"] = env["PATH"] + ";" + ldLibraryPath
if crashreporter:
env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
env['MOZ_CRASHREPORTER'] = '1'
else:
env['MOZ_CRASHREPORTER_DISABLE'] = '1'
env['GNOME_DISABLE_CRASH_DIALOG'] = '1'
env['XRE_NO_WINDOWS_CRASH_DIALOG'] = '1'
env['NS_TRACE_MALLOC_DISABLE_STACKS'] = '1'
# ASan specific environment stuff
if self.IS_ASAN and (self.IS_LINUX or self.IS_MAC):
try:
totalMemory = int(os.popen("free").readlines()[1].split()[1])
# Only 2 GB RAM or less available? Use custom ASan options to reduce
# the amount of resources required to do the tests. Standard options
# will otherwise lead to OOM conditions on the current test slaves.
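        # 'free' reports sizes in kilobytes, so 1024 * 1024 * 2 corresponds to 2 GB.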
if totalMemory <= 1024 * 1024 * 2:
self.log.info("INFO | automation.py | ASan running in low-memory configuration")
env["ASAN_OPTIONS"] = "quarantine_size=50331648:redzone=64"
      except OSError, err:
        self.log.info("Failed to determine available memory, disabling ASan low-memory configuration: %s", err.strerror)
      except:
        self.log.info("Failed to determine available memory, disabling ASan low-memory configuration")
return env
if IS_WIN32:
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
GetLastError = ctypes.windll.kernel32.GetLastError
def readWithTimeout(self, f, timeout):
"""Try to read a line of output from the file object |f|.
|f| must be a pipe, like the |stdout| member of a subprocess.Popen
object created with stdout=PIPE. If no output
is received within |timeout| seconds, return a blank line.
Returns a tuple (line, did_timeout), where |did_timeout| is True
if the read timed out, and False otherwise."""
if timeout is None:
# shortcut to allow callers to pass in "None" for no timeout.
return (f.readline(), False)
x = msvcrt.get_osfhandle(f.fileno())
l = ctypes.c_long()
done = time.time() + timeout
while time.time() < done:
if self.PeekNamedPipe(x, None, 0, None, ctypes.byref(l), None) == 0:
err = self.GetLastError()
if err == 38 or err == 109: # ERROR_HANDLE_EOF || ERROR_BROKEN_PIPE
return ('', False)
else:
            self.log.error("readWithTimeout got error: %d", err)
if l.value > 0:
# we're assuming that the output is line-buffered,
# which is not unreasonable
return (f.readline(), False)
time.sleep(0.01)
return ('', True)
def isPidAlive(self, pid):
STILL_ACTIVE = 259
PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
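      # A running process reports STILL_ACTIVE (259) from GetExitCodeProcess();
      # any other exit code means it has already terminated.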
pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
if not pHandle:
return False
pExitCode = ctypes.wintypes.DWORD()
ctypes.windll.kernel32.GetExitCodeProcess(pHandle, ctypes.byref(pExitCode))
ctypes.windll.kernel32.CloseHandle(pHandle)
return pExitCode.value == STILL_ACTIVE
def killPid(self, pid):
PROCESS_TERMINATE = 0x0001
pHandle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
if not pHandle:
return
success = ctypes.windll.kernel32.TerminateProcess(pHandle, 1)
ctypes.windll.kernel32.CloseHandle(pHandle)
else:
def readWithTimeout(self, f, timeout):
"""Try to read a line of output from the file object |f|. If no output
is received within |timeout| seconds, return a blank line.
Returns a tuple (line, did_timeout), where |did_timeout| is True
if the read timed out, and False otherwise."""
(r, w, e) = select.select([f], [], [], timeout)
if len(r) == 0:
return ('', True)
return (f.readline(), False)
def isPidAlive(self, pid):
try:
# kill(pid, 0) checks for a valid PID without actually sending a signal
# The method throws OSError if the PID is invalid, which we catch below.
os.kill(pid, 0)
# Wait on it to see if it's a zombie. This can throw OSError.ECHILD if
# the process terminates before we get to this point.
wpid, wstatus = os.waitpid(pid, os.WNOHANG)
return wpid == 0
except OSError, err:
# Catch the errors we might expect from os.kill/os.waitpid,
# and re-raise any others
if err.errno == errno.ESRCH or err.errno == errno.ECHILD:
return False
raise
def killPid(self, pid):
os.kill(pid, signal.SIGKILL)
def dumpScreen(self, utilityPath):
self.haveDumpedScreen = True;
    # Need to figure out which tool to use and whether it writes to a file or stdout
if self.UNIXISH:
utility = [os.path.join(utilityPath, "screentopng")]
imgoutput = 'stdout'
elif self.IS_MAC:
utility = ['/usr/sbin/screencapture', '-C', '-x', '-t', 'png']
imgoutput = 'file'
elif self.IS_WIN32:
utility = [os.path.join(utilityPath, "screenshot.exe")]
imgoutput = 'file'
# Run the capture correctly for the type of capture
try:
if imgoutput == 'file':
tmpfd, imgfilename = tempfile.mkstemp(prefix='mozilla-test-fail_')
os.close(tmpfd)
dumper = self.Process(utility + [imgfilename])
elif imgoutput == 'stdout':
dumper = self.Process(utility, bufsize=-1,
stdout=subprocess.PIPE, close_fds=True)
except OSError, err:
self.log.info("Failed to start %s for screenshot: %s",
utility[0], err.strerror)
return
# Check whether the capture utility ran successfully
dumper_out, dumper_err = dumper.communicate()
if dumper.returncode != 0:
self.log.info("%s exited with code %d", utility, dumper.returncode)
return
try:
if imgoutput == 'stdout':
image = dumper_out
elif imgoutput == 'file':
with open(imgfilename, 'rb') as imgfile:
image = imgfile.read()
    except IOError, err:
      self.log.info("Failed to read image from %s", imgoutput)
      return
import base64
encoded = base64.b64encode(image)
self.log.info("SCREENSHOT: data:image/png;base64,%s", encoded)
def killAndGetStack(self, proc, utilityPath, debuggerInfo):
"""Kill the process, preferrably in a way that gets us a stack trace."""
if not debuggerInfo:
if self.haveDumpedScreen:
self.log.info("Not taking screenshot here: see the one that was previously logged")
else:
self.dumpScreen(utilityPath)
if self.CRASHREPORTER and not debuggerInfo:
if self.UNIXISH:
# ABRT will get picked up by Breakpad's signal handler
os.kill(proc.pid, signal.SIGABRT)
return
elif self.IS_WIN32:
# We should have a "crashinject" program in our utility path
crashinject = os.path.normpath(os.path.join(utilityPath, "crashinject.exe"))
if os.path.exists(crashinject) and subprocess.Popen([crashinject, str(proc.pid)]).wait() == 0:
return
#TODO: kill the process such that it triggers Breakpad on OS X (bug 525296)
self.log.info("Can't trigger Breakpad, just killing process")
proc.kill()
def waitForFinish(self, proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath):
""" Look for timeout or crashes and return the status after the process terminates """
stackFixerProcess = None
stackFixerFunction = None
didTimeout = False
hitMaxTime = False
if proc.stdout is None:
self.log.info("TEST-INFO: Not logging stdout or stderr due to debugger connection")
else:
logsource = proc.stdout
if self.IS_DEBUG_BUILD and (self.IS_MAC or self.IS_LINUX) and symbolsPath and os.path.exists(symbolsPath):
# Run each line through a function in fix_stack_using_bpsyms.py (uses breakpad symbol files)
# This method is preferred for Tinderbox builds, since native symbols may have been stripped.
sys.path.insert(0, utilityPath)
import fix_stack_using_bpsyms as stackFixerModule
stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line, symbolsPath)
del sys.path[0]
elif self.IS_DEBUG_BUILD and self.IS_MAC and False:
# Run each line through a function in fix_macosx_stack.py (uses atos)
sys.path.insert(0, utilityPath)
import fix_macosx_stack as stackFixerModule
stackFixerFunction = lambda line: stackFixerModule.fixSymbols(line)
del sys.path[0]
elif self.IS_DEBUG_BUILD and self.IS_LINUX:
# Run logsource through fix-linux-stack.pl (uses addr2line)
# This method is preferred for developer machines, so we don't have to run "make buildsymbols".
stackFixerProcess = self.Process([self.PERL, os.path.join(utilityPath, "fix-linux-stack.pl")],
stdin=logsource,
stdout=subprocess.PIPE)
logsource = stackFixerProcess.stdout
(line, didTimeout) = self.readWithTimeout(logsource, timeout)
while line != "" and not didTimeout:
if stackFixerFunction:
line = stackFixerFunction(line)
self.log.info(line.rstrip().decode("UTF-8", "ignore"))
if "TEST-START" in line and "|" in line:
self.lastTestSeen = line.split("|")[1].strip()
if not debuggerInfo and "TEST-UNEXPECTED-FAIL" in line and "Test timed out" in line:
if self.haveDumpedScreen:
self.log.info("Not taking screenshot here: see the one that was previously logged")
else:
self.dumpScreen(utilityPath)
(line, didTimeout) = self.readWithTimeout(logsource, timeout)
if not hitMaxTime and maxTime and datetime.now() - startTime > timedelta(seconds = maxTime):
# Kill the application, but continue reading from stack fixer so as not to deadlock on stackFixerProcess.wait().
hitMaxTime = True
self.log.info("TEST-UNEXPECTED-FAIL | %s | application ran for longer than allowed maximum time of %d seconds", self.lastTestSeen, int(maxTime))
self.killAndGetStack(proc, utilityPath, debuggerInfo)
if didTimeout:
self.log.info("TEST-UNEXPECTED-FAIL | %s | application timed out after %d seconds with no output", self.lastTestSeen, int(timeout))
self.killAndGetStack(proc, utilityPath, debuggerInfo)
status = proc.wait()
if status == 0:
self.lastTestSeen = "Main app process exited normally"
if status != 0 and not didTimeout and not hitMaxTime:
self.log.info("TEST-UNEXPECTED-FAIL | %s | Exited with code %d during test run", self.lastTestSeen, status)
if stackFixerProcess is not None:
fixerStatus = stackFixerProcess.wait()
if fixerStatus != 0 and not didTimeout and not hitMaxTime:
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Stack fixer process exited with code %d during test run", fixerStatus)
return status
def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs):
""" build the application command line """
cmd = os.path.abspath(app)
if self.IS_MAC and not self.IS_CAMINO and os.path.exists(cmd + "-bin"):
# Prefer 'app-bin' in case 'app' is a shell script.
# We can remove this hack once bug 673899 etc are fixed.
cmd += "-bin"
args = []
if debuggerInfo:
args.extend(debuggerInfo["args"])
args.append(cmd)
cmd = os.path.abspath(debuggerInfo["path"])
if self.IS_MAC:
args.append("-foreground")
    if self.IS_CYGWIN:
      import commands  # not imported at module level; only needed on cygwin
      profileDirectory = commands.getoutput("cygpath -w \"" + profileDir + "/\"")
else:
profileDirectory = profileDir + "/"
args.extend(("-no-remote", "-profile", profileDirectory))
if testURL is not None:
if self.IS_CAMINO:
args.extend(("-url", testURL))
else:
args.append((testURL))
args.extend(extraArgs)
return cmd, args
def checkForZombies(self, processLog):
""" Look for hung processes """
if not os.path.exists(processLog):
self.log.info('Automation Error: PID log not found: %s', processLog)
      # Even though no hung process was found, the run should still be reported as a failure
return True
foundZombie = False
self.log.info('INFO | automation.py | Reading PID log: %s', processLog)
processList = []
pidRE = re.compile(r'launched child process (\d+)$')
processLogFD = open(processLog)
for line in processLogFD:
self.log.info(line.rstrip())
m = pidRE.search(line)
if m:
processList.append(int(m.group(1)))
processLogFD.close()
for processPID in processList:
self.log.info("INFO | automation.py | Checking for orphan process with PID: %d", processPID)
if self.isPidAlive(processPID):
foundZombie = True
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | child process %d still alive after shutdown", processPID)
self.killPid(processPID)
return foundZombie
def checkForCrashes(self, profileDir, symbolsPath):
return automationutils.checkForCrashes(os.path.join(profileDir, "minidumps"), symbolsPath, self.lastTestSeen)
def runApp(self, testURL, env, app, profileDir, extraArgs,
runSSLTunnel = False, utilityPath = None,
xrePath = None, certPath = None,
debuggerInfo = None, symbolsPath = None,
timeout = -1, maxTime = None, onLaunch = None):
"""
Run the app, log the duration it took to execute, return the status code.
Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing for |timeout| seconds.
"""
if utilityPath == None:
utilityPath = self.DIST_BIN
if xrePath == None:
xrePath = self.DIST_BIN
if certPath == None:
certPath = self.CERTS_SRC_DIR
if timeout == -1:
timeout = self.DEFAULT_TIMEOUT
# copy env so we don't munge the caller's environment
env = dict(env);
env["NO_EM_RESTART"] = "1"
tmpfd, processLog = tempfile.mkstemp(suffix='pidlog')
os.close(tmpfd)
env["MOZ_PROCESS_LOG"] = processLog
if self.IS_TEST_BUILD and runSSLTunnel:
# create certificate database for the profile
certificateStatus = self.fillCertificateDB(profileDir, certPath, utilityPath, xrePath)
if certificateStatus != 0:
self.log.info("TEST-UNEXPECTED-FAIL | automation.py | Certificate integration failed")
return certificateStatus
# start ssltunnel to provide https:// URLs capability
ssltunnel = os.path.join(utilityPath, "ssltunnel" + self.BIN_SUFFIX)
ssltunnelProcess = self.Process([ssltunnel,
os.path.join(profileDir, "ssltunnel.cfg")],
env = self.environment(xrePath = xrePath))
self.log.info("INFO | automation.py | SSL tunnel pid: %d", ssltunnelProcess.pid)
cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs)
startTime = datetime.now()
if debuggerInfo and debuggerInfo["interactive"]:
# If an interactive debugger is attached, don't redirect output,
# don't use timeouts, and don't capture ctrl-c.
timeout = None
maxTime = None
outputPipe = None
signal.signal(signal.SIGINT, lambda sigid, frame: None)
else:
outputPipe = subprocess.PIPE
self.lastTestSeen = "automation.py"
proc = self.Process([cmd] + args,
env = self.environment(env, xrePath = xrePath,
crashreporter = not debuggerInfo),
stdout = outputPipe,
stderr = subprocess.STDOUT)
self.log.info("INFO | automation.py | Application pid: %d", proc.pid)
if onLaunch is not None:
# Allow callers to specify an onLaunch callback to be fired after the
# app is launched.
onLaunch()
status = self.waitForFinish(proc, utilityPath, timeout, maxTime, startTime, debuggerInfo, symbolsPath)
self.log.info("INFO | automation.py | Application ran for: %s", str(datetime.now() - startTime))
# Do a final check for zombie child processes.
zombieProcesses = self.checkForZombies(processLog)
crashed = self.checkForCrashes(profileDir, symbolsPath)
if crashed or zombieProcesses:
status = 1
if os.path.exists(processLog):
os.unlink(processLog)
if self.IS_TEST_BUILD and runSSLTunnel:
ssltunnelProcess.kill()
return status
def getExtensionIDFromRDF(self, rdfSource):
"""
Retrieves the extension id from an install.rdf file (or string).
"""
from xml.dom.minidom import parse, parseString, Node
if isinstance(rdfSource, file):
document = parse(rdfSource)
else:
document = parseString(rdfSource)
# Find the <em:id> element. There can be multiple <em:id> tags
# within <em:targetApplication> tags, so we have to check this way.
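    # A typical install.rdf carries the id like this (example values):
    #   <Description about="urn:mozilla:install-manifest">
    #     <em:id>myextension@example.org</em:id>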
for rdfChild in document.documentElement.childNodes:
if rdfChild.nodeType == Node.ELEMENT_NODE and rdfChild.tagName == "Description":
for descChild in rdfChild.childNodes:
if descChild.nodeType == Node.ELEMENT_NODE and descChild.tagName == "em:id":
return descChild.childNodes[0].data
return None
def installExtension(self, extensionSource, profileDir, extensionID = None):
"""
Copies an extension into the extensions directory of the given profile.
extensionSource - the source location of the extension files. This can be either
a directory or a path to an xpi file.
profileDir - the profile directory we are copying into. We will create the
"extensions" directory there if it doesn't exist.
    extensionID - the id of the extension, used as the name of the directory that will
                  contain the extension if extensionSource is a directory, i.e. the
                  folder name in <profileDir>/extensions/<extensionID>
"""
if not os.path.isdir(profileDir):
self.log.info("INFO | automation.py | Cannot install extension, invalid profileDir at: %s", profileDir)
return
installRDFFilename = "install.rdf"
extensionsRootDir = os.path.join(profileDir, "extensions", "staged")
if not os.path.isdir(extensionsRootDir):
os.makedirs(extensionsRootDir)
if os.path.isfile(extensionSource):
reader = automationutils.ZipFileReader(extensionSource)
for filename in reader.namelist():
# Sanity check the zip file.
if os.path.isabs(filename):
self.log.info("INFO | automation.py | Cannot install extension, bad files in xpi")
return
# We may need to dig the extensionID out of the zip file...
if extensionID is None and filename == installRDFFilename:
extensionID = self.getExtensionIDFromRDF(reader.read(filename))
# We must know the extensionID now.
if extensionID is None:
self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
return
# Make the extension directory.
extensionDir = os.path.join(extensionsRootDir, extensionID)
os.mkdir(extensionDir)
# Extract all files.
reader.extractall(extensionDir)
elif os.path.isdir(extensionSource):
if extensionID is None:
filename = os.path.join(extensionSource, installRDFFilename)
if os.path.isfile(filename):
with open(filename, "r") as installRDF:
extensionID = self.getExtensionIDFromRDF(installRDF)
if extensionID is None:
self.log.info("INFO | automation.py | Cannot install extension, missing extensionID")
return
# Copy extension tree into its own directory.
# "destination directory must not already exist".
shutil.copytree(extensionSource, os.path.join(extensionsRootDir, extensionID))
else:
self.log.info("INFO | automation.py | Cannot install extension, invalid extensionSource at: %s", extensionSource)
def elf_arm(self, filename):
data = open(filename, 'rb').read(20)
return data[:4] == "\x7fELF" and ord(data[18]) == 40 # EM_ARM