Bug 1413687 - Remove hazard builds from mozharness; r=sfink

According to sfink, mozharness is no longer used to drive hazard
builds, which means a lot of dead code can be removed.

After this commit, there are no more references to "hazard" or
"spidermonkey" in testing/mozharness.

MozReview-Commit-ID: 8MWl8dMwRTD

--HG--
extra : rebase_source : 2156fbd13dffb22bb08b10fec2a66a9eebde8d57
Gregory Szorc 2017-11-06 11:06:09 -08:00
parent 29dc336611
commit 389d950c4c
8 changed files with 0 additions and 883 deletions

View File

@ -1,4 +0,0 @@
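# Variant config for the hazard analysis: build the full browser and compare
# the resulting hazard counts against expect.browser.json.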
config = {
'build_command': "build.browser",
'expect_file': "expect.browser.json",
}

View File

@ -1,4 +0,0 @@
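# Variant config for the hazard analysis: build just the JS shell and compare
# the resulting hazard counts against expect.shell.json.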
config = {
'build_command': "build.shell",
'expect_file': "expect.shell.json",
}

View File

@ -1,103 +0,0 @@
import os
HG_SHARE_BASE_DIR = "/builds/hg-shared"
PYTHON_DIR = "/tools/python27"
SRCDIR = "source"
config = {
"platform": "linux64",
"build_type": "br-haz",
"log_name": "hazards",
"shell-objdir": "obj-opt-js",
"analysis-dir": "analysis",
"analysis-objdir": "obj-analyzed",
"srcdir": SRCDIR,
"analysis-scriptdir": "js/src/devtools/rootAnalysis",
# These paths are relative to the tooltool checkout location
"sixgill": "sixgill/usr/libexec/sixgill",
"sixgill_bin": "sixgill/usr/bin",
"python": "python",
"exes": {
'gittool.py': '%(abs_tools_dir)s/buildfarm/utils/gittool.py',
'tooltool.py': '/tools/tooltool.py',
"virtualenv": "/tools/virtualenv/bin/virtualenv",
},
"force_clobber": True,
'vcs_share_base': HG_SHARE_BASE_DIR,
"repos": [{
"repo": "https://hg.mozilla.org/build/tools",
"branch": "default",
"dest": "tools"
}],
"upload_remote_baseuri": 'https://ftp-ssl.mozilla.org/',
"default_blob_upload_servers": [
"https://blobupload.elasticbeanstalk.com",
],
"blob_uploader_auth_file": os.path.join(os.getcwd(), "oauth.txt"),
"virtualenv_path": '%s/venv' % os.getcwd(),
'tools_dir': "/tools",
'compiler_manifest': "build/gcc.manifest",
'sixgill_manifest': "build/sixgill.manifest",
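# Tooltool manifests naming the pinned GCC and sixgill builds; resolved by
# query_compiler_manifest/query_sixgill_manifest in spidermonkey_build.py.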
# Mock.
"mock_packages": [
"autoconf213", "mozilla-python27-mercurial", "ccache",
"zip", "zlib-devel", "glibc-static",
"openssh-clients", "mpfr", "wget", "rsync",
# For building the JS shell
"gmp-devel", "nspr", "nspr-devel",
# For building the browser
"dbus-devel", "dbus-glib-devel", "hal-devel",
"libICE-devel", "libIDL-devel",
# For mach resource-usage
"python-psutil",
'zip', 'git',
'libstdc++-static', 'perl-Test-Simple', 'perl-Config-General',
'gtk2-devel', 'libnotify-devel', 'yasm',
'alsa-lib-devel', 'libcurl-devel',
'wireless-tools-devel', 'libX11-devel',
'libXt-devel', 'mesa-libGL-devel',
'gnome-vfs2-devel', 'GConf2-devel', 'wget',
'mpfr', # required for system compiler
'xorg-x11-font*', # fonts required for PGO
'imake', # required for makedepend!?!
'pulseaudio-libs-devel',
'freetype-2.3.11-6.el6_1.8.x86_64',
'freetype-devel-2.3.11-6.el6_1.8.x86_64',
'gstreamer-devel', 'gstreamer-plugins-base-devel',
],
"mock_files": [
("/home/cltbld/.ssh", "/home/mock_mozilla/.ssh"),
('/home/cltbld/.hgrc', '/builds/.hgrc'),
('/builds/relengapi.tok', '/builds/relengapi.tok'),
("/tools/tooltool.py", "/tools/tooltool.py"),
('/usr/local/lib/hgext', '/usr/local/lib/hgext'),
],
"env_replacements": {
"pythondir": PYTHON_DIR,
"gccdir": "%(abs_work_dir)s/gcc",
"sixgilldir": "%(abs_work_dir)s/sixgill",
},
"partial_env": {
"PATH": "%(pythondir)s/bin:%(gccdir)s/bin:%(PATH)s",
"LD_LIBRARY_PATH": "%(sixgilldir)s/usr/lib64",
# Suppress the mercurial-setup check. When running in automation, this
# is redundant with MOZ_AUTOMATION, but a local developer-mode build
# will have the mach state directory set to a nonstandard location and
# therefore will always claim that mercurial-setup has not been run.
"I_PREFER_A_SUBOPTIMAL_MERCURIAL_EXPERIENCE": "1",
},
}

View File

@ -1,38 +0,0 @@
# This config file is for locally testing spidermonkey_build.py. It supplies
# the values that would otherwise come from buildbot.
BRANCH = "local-src"
HOME = "/home/sfink"
REPO = HOME + "/src/MI-GC"
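# REPO points at a local Gecko clone; BRANCH names the checkout directory it
# is cloned into (see "dest" below).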
config = {
"hgurl": "https://hg.mozilla.org/",
"python": "python",
"sixgill": HOME + "/src/sixgill",
"sixgill_bin": HOME + "/src/sixgill/bin",
"repo": REPO,
"repos": [{
"repo": REPO,
"branch": "default",
"dest": BRANCH,
}, {
"repo": "https://hg.mozilla.org/build/tools",
"branch": "default",
"dest": "tools"
}],
"tools_dir": "/tools",
"mock_target": "mozilla-centos6-x86_64",
"upload_remote_basepath": "/tmp/upload-base",
"upload_ssh_server": "localhost",
"upload_ssh_key": "/home/sfink/.ssh/id_rsa",
"upload_ssh_user": "sfink",
"upload_label": "linux64-br-haz",
# For testing tryserver uploads (directory structure is different)
#"branch": "try",
#"revision": "deadbeef1234",
}

View File

@ -13,6 +13,5 @@ scripts
mobile_l10n.rst
mobile_partner_repack.rst
multil10n.rst
spidermonkey_build.rst
talos_script.rst
web_platform_tests.rst

View File

@ -1,7 +0,0 @@
spidermonkey_build module
=========================
.. automodule:: spidermonkey_build
:members:
:undoc-members:
:show-inheritance:

View File

@ -1,241 +0,0 @@
import os
import json
import re
from mozharness.base.errors import MakefileErrorList
from mozharness.mozilla.buildbot import TBPL_WARNING
class HazardError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
# Logging ends up calling splitlines directly on what is being logged, which would fail.
def splitlines(self):
return str(self).splitlines()
class HazardAnalysis(object):
def clobber_shell(self, builder):
"""Clobber the specially-built JS shell used to run the analysis"""
dirs = builder.query_abs_dirs()
builder.rmtree(dirs['shell_objdir'])
def configure_shell(self, builder):
"""Configure the specially-built JS shell used to run the analysis"""
dirs = builder.query_abs_dirs()
if not os.path.exists(dirs['shell_objdir']):
builder.mkdir_p(dirs['shell_objdir'])
js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
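# autoconf 2.13 is installed as "autoconf-2.13" on some distros and as
# "autoconf2.13" on others, so try both spellings.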
rc = builder.run_command(['autoconf-2.13'],
cwd=js_src_dir,
env=builder.env,
error_list=MakefileErrorList)
if rc != 0:
rc = builder.run_command(['autoconf2.13'],
cwd=js_src_dir,
env=builder.env,
error_list=MakefileErrorList)
if rc != 0:
raise HazardError("autoconf failed, can't continue.")
rc = builder.run_command([os.path.join(js_src_dir, 'configure'),
'--enable-optimize',
'--disable-debug',
'--enable-ctypes',
'--with-system-nspr',
'--without-intl-api'],
cwd=dirs['shell_objdir'],
env=builder.env,
error_list=MakefileErrorList)
if rc != 0:
raise HazardError("Configure failed, can't continue.")
def build_shell(self, builder):
"""Build a JS shell specifically for running the analysis"""
dirs = builder.query_abs_dirs()
rc = builder.run_command(['make', '-j', str(builder.config.get('concurrency', 4)), '-s'],
cwd=dirs['shell_objdir'],
env=builder.env,
error_list=MakefileErrorList)
if rc != 0:
raise HazardError("Build failed, can't continue.")
def clobber(self, builder):
"""Clobber all of the old analysis data. Note that theoretically we could do
incremental analyses, but they seem to still be buggy."""
dirs = builder.query_abs_dirs()
builder.rmtree(dirs['abs_analysis_dir'])
builder.rmtree(dirs['abs_analyzed_objdir'])
def setup(self, builder):
"""Prepare the config files and scripts for running the analysis"""
dirs = builder.query_abs_dirs()
analysis_dir = dirs['abs_analysis_dir']
if not os.path.exists(analysis_dir):
builder.mkdir_p(analysis_dir)
js_src_dir = os.path.join(dirs['gecko_src'], 'js', 'src')
values = {
'js': os.path.join(dirs['shell_objdir'], 'dist', 'bin', 'js'),
'analysis_scriptdir': os.path.join(js_src_dir, 'devtools', 'rootAnalysis'),
'source_objdir': dirs['abs_analyzed_objdir'],
'source': os.path.join(dirs['abs_work_dir'], 'source'),
'sixgill': os.path.join(dirs['abs_work_dir'], builder.config['sixgill']),
'sixgill_bin': os.path.join(dirs['abs_work_dir'], builder.config['sixgill_bin']),
'gcc_bin': os.path.join(dirs['abs_work_dir'], 'gcc'),
}
defaults = """
js = '%(js)s'
analysis_scriptdir = '%(analysis_scriptdir)s'
objdir = '%(source_objdir)s'
source = '%(source)s'
sixgill = '%(sixgill)s'
sixgill_bin = '%(sixgill_bin)s'
gcc_bin = '%(gcc_bin)s'
jobs = 4
""" % values
defaults_path = os.path.join(analysis_dir, 'defaults.py')
with open(defaults_path, "w") as defaults_fh:
    defaults_fh.write(defaults)
builder.log("Wrote analysis config file " + defaults_path)
build_script = builder.config['build_command']
builder.copyfile(os.path.join(dirs['mozharness_scriptdir'],
os.path.join('spidermonkey', build_script)),
os.path.join(analysis_dir, build_script),
copystat=True)
def run(self, builder, env, error_list):
"""Execute the analysis, which consists of building all analyzed
source code with a GCC plugin active that siphons off the interesting
data, then running some JS scripts over the databases created by
the plugin."""
dirs = builder.query_abs_dirs()
analysis_dir = dirs['abs_analysis_dir']
analysis_scriptdir = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'])
build_script = builder.config['build_command']
build_script = os.path.abspath(os.path.join(analysis_dir, build_script))
cmd = [
builder.config['python'],
os.path.join(analysis_scriptdir, 'analyze.py'),
"--source", dirs['gecko_src'],
"--buildcommand", build_script,
]
retval = builder.run_command(cmd,
cwd=analysis_dir,
env=env,
error_list=error_list)
if retval != 0:
raise HazardError("failed to build")
def collect_output(self, builder):
"""Gather up the analysis output and place in the upload dir."""
dirs = builder.query_abs_dirs()
analysis_dir = dirs['abs_analysis_dir']
upload_dir = dirs['abs_blob_upload_dir']
builder.mkdir_p(upload_dir)
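# (filename, short description, long description) triples for each analysis
# artifact to publish.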
files = (('rootingHazards.txt',
'rooting_hazards',
'list of rooting hazards, unsafe references, and extra roots'),
('gcFunctions.txt',
'gcFunctions',
'list of functions that can gc, and why'),
('allFunctions.txt',
'allFunctions',
'list of all functions that were compiled'),
('gcTypes.txt',
'gcTypes',
'list of types containing unrooted gc pointers'),
('unnecessary.txt',
'extra',
'list of extra roots (rooting with no GC function in scope)'),
('refs.txt',
'refs',
'list of unsafe references to unrooted pointers'),
('hazards.txt',
'hazards',
'list of just the hazards, together with gcFunction reason for each'))
for f, short, long in files:
builder.copy_to_upload_dir(os.path.join(analysis_dir, f),
short_desc=short,
long_desc=long,
compress=False, # blobber will compress
upload_dir=upload_dir)
print("== Hazards (temporarily inline here, beware weirdly interleaved output, see bug 1211402) ==")
print(open(os.path.join(analysis_dir, "hazards.txt")).read())
def upload_results(self, builder):
"""Upload the results of the analysis."""
pass
def check_expectations(self, builder):
"""Compare the actual to expected number of problems."""
if 'expect_file' not in builder.config:
builder.info('No expect_file given; skipping comparison with expected hazard count')
return
dirs = builder.query_abs_dirs()
analysis_dir = dirs['abs_analysis_dir']
analysis_scriptdir = os.path.join(dirs['gecko_src'], 'js', 'src', 'devtools', 'rootAnalysis')
expect_file = os.path.join(analysis_scriptdir, builder.config['expect_file'])
expect = builder.read_from_file(expect_file)
if expect is None:
raise HazardError("could not load expectation file")
data = json.loads(expect)
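# Expectation files are JSON of the form {"expect-hazards": N,
# "expect-refs": M}; a missing key means that count is reported but not
# enforced.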
num_hazards = 0
num_refs = 0
with builder.opened(os.path.join(analysis_dir, "rootingHazards.txt")) as (hazards_fh, err):
if err:
raise HazardError("hazards file required")
for line in hazards_fh:
m = re.match(r"^Function.*has unrooted.*live across GC call", line)
if m:
num_hazards += 1
m = re.match(r'^Function.*takes unsafe address of unrooted', line)
if m:
num_refs += 1
expect_hazards = data.get('expect-hazards')
status = []
if expect_hazards is None:
status.append("%d hazards" % num_hazards)
else:
status.append("%d/%d hazards allowed" % (num_hazards, expect_hazards))
if expect_hazards is not None and expect_hazards != num_hazards:
if expect_hazards < num_hazards:
builder.warning("TEST-UNEXPECTED-FAIL %d more hazards than expected (expected %d, saw %d)" %
(num_hazards - expect_hazards, expect_hazards, num_hazards))
builder.buildbot_status(TBPL_WARNING)
else:
builder.info("%d fewer hazards than expected! (expected %d, saw %d)" %
(expect_hazards - num_hazards, expect_hazards, num_hazards))
expect_refs = data.get('expect-refs')
if expect_refs is None:
status.append("%d unsafe refs" % num_refs)
else:
status.append("%d/%d unsafe refs allowed" % (num_refs, expect_refs))
if expect_refs is not None and expect_refs != num_refs:
if expect_refs < num_refs:
builder.warning("TEST-UNEXPECTED-FAIL %d more unsafe refs than expected (expected %d, saw %d)" %
(num_refs - expect_refs, expect_refs, num_refs))
builder.buildbot_status(TBPL_WARNING)
else:
builder.info("%d fewer unsafe refs than expected! (expected %d, saw %d)" %
(expect_refs - num_refs, expect_refs, num_refs))
builder.info("TinderboxPrint: " + ", ".join(status))

View File

@ -1,485 +0,0 @@
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
import copy
from datetime import datetime
from functools import wraps
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import MakefileErrorList
from mozharness.base.script import BaseScript
from mozharness.base.transfer import TransferMixin
from mozharness.base.vcs.vcsbase import VCSMixin
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.building.hazards import HazardError, HazardAnalysis
from mozharness.mozilla.purge import PurgeMixin
from mozharness.mozilla.mock import MockMixin
from mozharness.mozilla.tooltool import TooltoolMixin
SUCCESS, WARNINGS, FAILURE, EXCEPTION, RETRY = xrange(5)
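# Exit statuses for the script: fatal analysis problems exit with FAILURE,
# checkout problems with RETRY (see the actions below).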
def requires(*queries):
"""Wrapper for detecting problems where some bit of information
required by the wrapped step is unavailable. Use it by prepending
@requires(query_foo), which will check whether query_foo(self) returns
something useful."""
def make_wrapper(f):
@wraps(f)
def wrapper(self, *args, **kwargs):
for query in queries:
val = query(self)
goodval = not (val is None or "None" in str(val))
assert goodval, f.__name__ + " requires " + query.__name__ + " to return a value"
return f(self, *args, **kwargs)
return wrapper
return make_wrapper
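# Illustrative (hypothetical) usage:
#   @requires(query_repo)
#   def checkout_source(self): ...

# Environment variables scrubbed from the build environment (passed as
# purge_env when self.env is constructed in _pre_config_lock).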
nuisance_env_vars = ['TERMCAP', 'LS_COLORS', 'PWD', '_']
class SpidermonkeyBuild(MockMixin,
PurgeMixin, BaseScript,
VCSMixin, BuildbotMixin, TooltoolMixin, TransferMixin, BlobUploadMixin):
config_options = [
[["--repo"], {
"dest": "repo",
"help": "which gecko repo to get spidermonkey from",
}],
[["--source"], {
"dest": "source",
"help": "directory containing gecko source tree (instead of --repo)",
}],
[["--revision"], {
"dest": "revision",
}],
[["--branch"], {
"dest": "branch",
}],
[["--vcs-share-base"], {
"dest": "vcs_share_base",
"help": "base directory for shared repositories",
}],
[["-j"], {
"dest": "concurrency",
"type": int,
"default": 4,
"help": "number of simultaneous jobs used while building the shell " +
"(currently ignored for the analyzed build",
}] + copy.deepcopy(blobupload_config_options)
]
def __init__(self):
super(SpidermonkeyBuild, self).__init__(
config_options=self.config_options,
# other stuff
all_actions=[
'purge',
'checkout-tools',
# First, build an optimized JS shell for running the analysis
'checkout-source',
'get-blobs',
'clobber-shell',
'configure-shell',
'build-shell',
# Next, build a tree with the analysis plugin active. Note that
# we are using the same checkout for the JS shell build and the
# build of the source to be analyzed, which is a little
# unnecessary (no need to rebuild the JS shell all the time).
# (Different objdir, though.)
'clobber-analysis',
'setup-analysis',
'run-analysis',
'collect-analysis-output',
'upload-analysis',
'check-expectations',
],
default_actions=[
'purge',
'checkout-tools',
'checkout-source',
'get-blobs',
'clobber-shell',
'configure-shell',
'build-shell',
'clobber-analysis',
'setup-analysis',
'run-analysis',
'collect-analysis-output',
# Temporarily disabled, see bug 1211402
# 'upload-analysis',
'check-expectations',
],
config={
'default_vcs': 'hg',
'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
'ccache': True,
'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
'tools_repo': 'https://hg.mozilla.org/build/tools',
'upload_ssh_server': None,
'upload_remote_basepath': None,
'enable_try_uploads': True,
'source': None,
'stage_product': 'firefox',
},
)
self.buildid = None
self.create_virtualenv()
self.analysis = HazardAnalysis()
def _pre_config_lock(self, rw_config):
if self.config['source']:
self.config['srcdir'] = self.config['source']
super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)
if self.buildbot_config is None:
self.info("Reading buildbot build properties...")
self.read_buildbot_config()
if self.buildbot_config:
bb_props = [('mock_target', 'mock_target', None),
('hgurl', 'hgurl', None),
('clobberer_url', 'clobberer_url',
'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
('force_clobber', 'force_clobber', None),
('branch', 'blob_upload_branch', None),
]
buildbot_props = self.buildbot_config.get('properties', {})
for bb_prop, cfg_prop, default in bb_props:
if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
self.config['is_automation'] = True
else:
self.config['is_automation'] = False
self.config.setdefault('blob_upload_branch', 'devel')
dirs = self.query_abs_dirs()
replacements = self.config['env_replacements'].copy()
for k, v in replacements.items():
replacements[k] = v % dirs
self.env = self.query_env(replace_dict=replacements,
partial_env=self.config['partial_env'],
purge_env=nuisance_env_vars)
self.env['MOZ_UPLOAD_DIR'] = dirs['abs_blob_upload_dir']
self.env['TOOLTOOL_DIR'] = dirs['abs_work_dir']
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
abs_dirs = BaseScript.query_abs_dirs(self)
abs_work_dir = abs_dirs['abs_work_dir']
dirs = {
'shell_objdir':
os.path.join(abs_work_dir, self.config['shell-objdir']),
'mozharness_scriptdir':
os.path.abspath(os.path.dirname(__file__)),
'abs_analysis_dir':
os.path.join(abs_work_dir, self.config['analysis-dir']),
'abs_analyzed_objdir':
os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
'analysis_scriptdir':
os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
'abs_tools_dir':
os.path.join(abs_dirs['base_work_dir'], 'tools'),
'gecko_src':
os.path.join(abs_work_dir, self.config['srcdir']),
'abs_blob_upload_dir':
os.path.join(abs_work_dir, 'blobber_upload_dir'),
}
abs_dirs.update(dirs)
self.abs_dirs = abs_dirs
return self.abs_dirs
def query_repo(self):
if self.config.get('repo'):
return self.config['repo']
elif self.buildbot_config and 'properties' in self.buildbot_config:
return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
else:
return None
def query_revision(self):
if 'revision' in self.buildbot_properties:
revision = self.buildbot_properties['revision']
elif self.buildbot_config and 'sourcestamp' in self.buildbot_config:
revision = self.buildbot_config['sourcestamp']['revision']
else:
# Useful for local testing. In actual use, this would always be
# None.
revision = self.config.get('revision')
return revision
def query_branch(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
return self.buildbot_config['properties']['branch']
elif 'branch' in self.config:
# Used for locally testing try vs non-try
return self.config['branch']
else:
return os.path.basename(self.query_repo())
def query_compiler_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['compiler_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
def query_sixgill_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['sixgill_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
def query_buildid(self):
if self.buildid:
return self.buildid
if self.buildbot_config and 'properties' in self.buildbot_config:
self.buildid = self.buildbot_config['properties'].get('buildid')
if not self.buildid:
self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
return self.buildid
def query_upload_ssh_server(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
return self.buildbot_config['properties']['upload_ssh_server']
else:
return self.config['upload_ssh_server']
def query_upload_ssh_key(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
key = self.buildbot_config['properties']['upload_ssh_key']
else:
key = self.config['upload_ssh_key']
if self.mock_enabled and not key.startswith("/"):
key = "/home/mock_mozilla/.ssh/" + key
return key
def query_upload_ssh_user(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
return self.buildbot_config['properties']['upload_ssh_user']
else:
return self.config['upload_ssh_user']
def query_product(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
return self.buildbot_config['properties']['product']
else:
return self.config['product']
def query_upload_remote_basepath(self):
if self.config.get('upload_remote_basepath'):
return self.config['upload_remote_basepath']
else:
return "/pub/mozilla.org/{product}".format(
product=self.query_product(),
)
def query_upload_remote_baseuri(self):
baseuri = self.config.get('upload_remote_baseuri')
if self.buildbot_config and 'properties' in self.buildbot_config:
buildprops = self.buildbot_config['properties']
if 'upload_remote_baseuri' in buildprops:
baseuri = buildprops['upload_remote_baseuri']
return baseuri.strip("/") if baseuri else None
def query_target(self):
if self.buildbot_config and 'properties' in self.buildbot_config:
return self.buildbot_config['properties']['platform']
else:
return self.config.get('target')
def query_upload_path(self):
branch = self.query_branch()
common = {
'basepath': self.query_upload_remote_basepath(),
'branch': branch,
'target': self.query_target(),
}
if branch == 'try':
if not self.config['enable_try_uploads']:
return None
try:
user = self.buildbot_config['sourcestamp']['changes'][0]['who']
except (KeyError, TypeError):
user = "unknown"
return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
user=user,
rev=self.query_revision(),
**common
)
else:
return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
buildid=self.query_buildid(),
**common
)
def query_do_upload(self):
if self.query_branch() == 'try':
return self.config.get('enable_try_uploads')
return True
# Actions {{{2
def purge(self):
dirs = self.query_abs_dirs()
self.info("purging, abs_upload_dir=" + dirs['abs_upload_dir'])
PurgeMixin.clobber(
self,
always_clobber_dirs=[
dirs['abs_upload_dir'],
],
)
def checkout_tools(self):
dirs = self.query_abs_dirs()
# If running from within a directory also passed as the --source dir,
# this has the danger of clobbering <source>/tools/
if self.config['source']:
srcdir = self.config['source']
if os.path.samefile(srcdir, os.path.dirname(dirs['abs_tools_dir'])):
raise Exception("Cannot run from source checkout to avoid overwriting subdirs")
rev = self.vcs_checkout(
vcs='hg',
branch="default",
repo=self.config['tools_repo'],
clean=False,
dest=dirs['abs_tools_dir'],
)
self.set_buildbot_property("tools_revision", rev, write_to_file=True)
def do_checkout_source(self):
# --source option means to use an existing source directory instead of checking one out.
if self.config['source']:
return
dirs = self.query_abs_dirs()
dest = dirs['gecko_src']
# Pre-create the directory to appease the share extension
if not os.path.exists(dest):
self.mkdir_p(dest)
rev = self.vcs_checkout(
repo=self.query_repo(),
dest=dest,
revision=self.query_revision(),
branch=self.config.get('branch'),
clean=True,
)
self.set_buildbot_property('source_revision', rev, write_to_file=True)
def checkout_source(self):
try:
self.do_checkout_source()
except Exception as e:
self.fatal("checkout failed: " + str(e), exit_code=RETRY)
def get_blobs(self):
work_dir = self.query_abs_dirs()['abs_work_dir']
if not os.path.exists(work_dir):
self.mkdir_p(work_dir)
self.tooltool_fetch(self.query_compiler_manifest(), output_dir=work_dir)
self.tooltool_fetch(self.query_sixgill_manifest(), output_dir=work_dir)
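# tooltool downloads the pinned compiler and sixgill builds named in the
# manifests into the work dir (which TOOLTOOL_DIR also points at).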
def clobber_shell(self):
self.analysis.clobber_shell(self)
def configure_shell(self):
self.enable_mock()
try:
self.analysis.configure_shell(self)
except HazardError as e:
self.fatal(e, exit_code=FAILURE)
self.disable_mock()
def build_shell(self):
self.enable_mock()
try:
self.analysis.build_shell(self)
except HazardError as e:
self.fatal(e, exit_code=FAILURE)
self.disable_mock()
def clobber_analysis(self):
self.analysis.clobber(self)
def setup_analysis(self):
self.analysis.setup(self)
def run_analysis(self):
self.enable_mock()
upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
if not os.path.exists(upload_dir):
self.mkdir_p(upload_dir)
env = self.env.copy()
env['MOZ_UPLOAD_DIR'] = upload_dir
try:
self.analysis.run(self, env=env, error_list=MakefileErrorList)
except HazardError as e:
self.fatal(e, exit_code=FAILURE)
self.disable_mock()
def collect_analysis_output(self):
self.analysis.collect_output(self)
def upload_analysis(self):
if not self.config['is_automation']:
return
if not self.query_do_upload():
self.info("Uploads disabled for this build. Skipping...")
return
self.enable_mock()
try:
self.analysis.upload_results(self)
except HazardError as e:
self.error(e)
self.return_code = WARNINGS
self.disable_mock()
def check_expectations(self):
try:
self.analysis.check_expectations(self)
except HazardError as e:
self.fatal(e, exit_code=FAILURE)
# main {{{1
if __name__ == '__main__':
myScript = SpidermonkeyBuild()
myScript.run_and_exit()