Backed out changeset 6fa6cfe0199c (bug 1403131) for mass reftest failures. r=backout on a CLOSED TREE

--HG--
extra : amend_source : 4d399778793d9239e5c441fe237a15d12c4f1360
Sebastian Hengst 2017-10-23 16:39:24 +02:00
parent 3be377b84e
commit a79a507dc2
29 changed files with 402 additions and 435 deletions

View File

@ -1,3 +1,5 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from mozfile import *

View File

@ -32,7 +32,7 @@ except NameError:
WindowsError = None # so we can unconditionally catch it later...
# utilities for extracting archives
### utilities for extracting archives
def extract_tarball(src, dest):
"""extract a .tar file"""
@ -54,7 +54,7 @@ def extract_zip(src, dest):
else:
try:
bundle = zipfile.ZipFile(src)
except Exception:
except Exception, e:
print "src: %s" % src
raise
@ -118,7 +118,7 @@ def extract(src, dest=None):
return top_level_files
# utilities for removal of files and directories
### utilities for removal of files and directories
def rmtree(dir):
"""Deprecated wrapper method to remove a directory tree.
@ -179,7 +179,6 @@ def remove(path):
os.chmod(path, path_stats.st_mode | stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
_call_with_windows_retry(shutil.rmtree, path)
def depth(directory):
"""returns the integer depth of a directory or path relative to '/' """
@ -192,22 +191,20 @@ def depth(directory):
break
return level
# ASCII delimeters
ascii_delimeters = {
'vertical_line': '|',
'item_marker': '+',
'last_child': '\\'
'vertical_line' : '|',
'item_marker' : '+',
'last_child' : '\\'
}
# unicode delimiters
unicode_delimeters = {
'vertical_line': '│',
'item_marker': '├',
'last_child': '└'
'vertical_line' : '│',
'item_marker' : '├',
'last_child' : '└'
}
def tree(directory,
item_marker=unicode_delimeters['item_marker'],
vertical_line=unicode_delimeters['vertical_line'],
@ -234,6 +231,7 @@ def tree(directory,
resource[:] = sorted(resource, key=sort_key)
files_end = item_marker
dirpath_marker = item_marker
if level > len(indent):
indent.append(vertical_line)
@ -256,19 +254,21 @@ def tree(directory,
# append the directory and piece of tree structure
# if the top-level entry directory, print as passed
retval.append('%s%s%s' % (''.join(indent[:-1]),
dirpath_mark, basename if retval else directory))
retval.append('%s%s%s'% (''.join(indent[:-1]),
dirpath_mark,
basename if retval else directory))
# add the files
if filenames:
last_file = filenames[-1]
retval.extend([('%s%s%s' % (''.join(indent),
files_end if filename == last_file else item_marker, filename))
files_end if filename == last_file else item_marker,
filename))
for index, filename in enumerate(filenames)])
return '\n'.join(retval)
# utilities for temporary resources
### utilities for temporary resources
class NamedTemporaryFile(object):
"""
@ -340,7 +340,7 @@ def TemporaryDirectory():
shutil.rmtree(tempdir)
# utilities dealing with URLs
### utilities dealing with URLs
def is_url(thing):
"""
@ -353,7 +353,6 @@ def is_url(thing):
else:
return len(parsed[0]) >= 2
def load(resource):
"""
open a file or URL for reading. If the passed resource string is not a URL,
@ -370,3 +369,4 @@ def load(resource):
return file(resource)
return urllib2.urlopen(resource)
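
An aside on the temporary-resource utilities touched a few hunks above: mozfile's TemporaryDirectory boils down to a context manager that creates a scratch directory and always removes it on exit. A minimal standalone sketch of that pattern (illustrative only, standard library, not the patch's actual code):

import contextlib
import shutil
import tempfile

@contextlib.contextmanager
def temporary_directory():
    # create a scratch directory, hand it to the caller, then always clean up
    tempdir = tempfile.mkdtemp()
    try:
        yield tempdir
    finally:
        shutil.rmtree(tempdir)

# usage: the directory only exists inside the with-block
# with temporary_directory() as tmp:
#     do_something_with(tmp)
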

View File

@ -52,5 +52,5 @@ Module variables:
"""
import mozinfo
from mozinfo import *
__all__ = mozinfo.__all__

View File

@ -19,24 +19,19 @@ import mozfile
# keep a copy of the os module since updating globals overrides this
_os = os
class unknown(object):
"""marker class for unknown information"""
def __nonzero__(self):
return False
def __str__(self):
return 'UNKNOWN'
unknown = unknown() # singleton
# get system information
info = {'os': unknown,
'processor': unknown,
'version': unknown,
'bits': unknown}
'bits': unknown }
(system, node, release, version, machine, processor) = platform.uname()
(bits, linkage) = platform.architecture()
@ -107,7 +102,6 @@ def sanitize(info):
info["processor"] = "x86"
info["bits"] = 32
# method for updating information
def update(new_info):
"""
@ -130,10 +124,9 @@ def update(new_info):
for os_name in choices['os']:
globals()['is' + os_name.title()] = info['os'] == os_name
# unix is special
if isLinux or isBsd: # noqa
if isLinux or isBsd:
globals()['isUnix'] = True
def find_and_update_from_json(*dirs):
"""
Find a mozinfo.json file, load it, and update the info with the
@ -165,7 +158,6 @@ def find_and_update_from_json(*dirs):
return None
update({})
# exports
@ -180,7 +172,6 @@ __all__ += [
'find_and_update_from_json',
]
def main(args=None):
# parse the command line
@ -208,13 +199,11 @@ def main(args=None):
print '%s choices: %s' % (key, ' '.join([str(choice)
for choice in choices[key]]))
flag = True
if flag:
return
if flag: return
# otherwise, print out all info
for key, value in info.items():
print '%s: %s' % (key, value)
if __name__ == '__main__':
main()
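
A side note on the update() hunk above: the globals() assignment is what gives mozinfo its module-level isLinux / isMac / isWin style flags. A minimal standalone illustration of that technique (hypothetical example values, not mozinfo's actual choices table):

info = {'os': 'linux'}
os_choices = ['linux', 'bsd', 'mac', 'win', 'unknown']  # assumed example list

for os_name in os_choices:
    # creates module-level booleans named isLinux, isBsd, isMac, isWin, isUnknown
    globals()['is' + os_name.title()] = (info['os'] == os_name)

# unix is special: derived from the two flags above
isUnix = isLinux or isBsd
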

View File

@ -12,6 +12,7 @@ import os
import re
import sys
import signal
import socket
import subprocess
import time
import tempfile
@ -31,8 +32,7 @@ from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_opt
from mozharness.mozilla.testing.unittest import EmulatorMixin
class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript,
MozbaseMixin):
class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript, MozbaseMixin):
config_options = [[
["--test-suite"],
{"action": "store",
@ -226,7 +226,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"tmp_file": tmp_file,
}
def _retry(self, max_attempts, interval, func, description, max_time=0):
def _retry(self, max_attempts, interval, func, description, max_time = 0):
'''
Execute func until it returns True, up to max_attempts times, waiting for
interval seconds between each attempt. description is logged on each attempt.
@ -237,13 +237,12 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
status = False
attempts = 0
if max_time > 0:
end_time = datetime.datetime.now() + datetime.timedelta(seconds=max_time)
end_time = datetime.datetime.now() + datetime.timedelta(seconds = max_time)
else:
end_time = None
while attempts < max_attempts and not status:
if (end_time is not None) and (datetime.datetime.now() > end_time):
self.info("Maximum retry run-time of %d seconds exceeded; "
"remaining attempts abandoned" % max_time)
self.info("Maximum retry run-time of %d seconds exceeded; remaining attempts abandoned" % max_time)
break
if attempts != 0:
self.info("Sleeping %d seconds" % interval)
@ -291,13 +290,11 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
if not adb_ok:
self.warning('Unable to communicate with adb')
return False
adb_device_ok = self._retry(4, 30, self._verify_adb_device,
"Verify emulator visible to adb")
adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
if not adb_device_ok:
self.warning('Unable to communicate with emulator via adb')
return False
boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed",
max_time=330)
boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time = 330)
if not boot_ok:
self.warning('Unable to verify Android boot completion')
return False
@ -322,11 +319,9 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
def _install_fennec_apk(self):
install_ok = False
if int(self.sdk_level) >= 23:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
self.installer_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.installer_path]
else:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
self.installer_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.installer_path]
out = self._run_with_timeout(300, cmd, True)
if 'Success' in out:
install_ok = True
@ -335,11 +330,9 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
def _install_robocop_apk(self):
install_ok = False
if int(self.sdk_level) >= 23:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g',
self.robocop_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.robocop_path]
else:
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r',
self.robocop_path]
cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.robocop_path]
out = self._run_with_timeout(300, cmd, True)
if 'Success' in out:
install_ok = True
@ -462,7 +455,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
continue
if '%(app)' in option:
# only query package name if requested
cmd.extend([option % {'app': self._query_package_name()}])
cmd.extend([option % {'app' : self._query_package_name()}])
else:
cmd.extend([option % str_format_values])
@ -473,7 +466,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
try_options, try_tests = self.try_args(self.test_suite)
cmd.extend(try_options)
if self.config.get('verify') is not True:
if self.config.get('verify') != True:
cmd.extend(self.query_tests_args(
self.config["suite_definitions"][self.test_suite].get("tests"),
None,
@ -501,8 +494,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# something unexpected!
repo = 'https://hg.mozilla.org/mozilla-central'
revision = 'default'
self.warning('Unable to find repo/revision for manifest; '
'using mozilla-central/default')
self.warning('Unable to find repo/revision for manifest; using mozilla-central/default')
url = '%s/raw-file/%s/%s' % (
repo,
revision,
@ -527,7 +519,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
cache=c.get("tooltool_cache", None))
##########################################
# Actions for AndroidEmulatorTest #
### Actions for AndroidEmulatorTest ###
##########################################
def setup_avds(self):
'''
@ -569,8 +561,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'''
Starts the emulator
'''
if 'emulator_url' in self.config or 'emulator_manifest' in self.config or \
'tools_manifest' in self.config:
if 'emulator_url' in self.config or 'emulator_manifest' in self.config or 'tools_manifest' in self.config:
self.install_emulator()
if not os.path.isfile(self.adb_path):
@ -650,8 +641,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
'''
self.mkdir_p(self.query_abs_dirs()['abs_blob_upload_dir'])
max_restarts = 5
emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail,
"Check emulator")
emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail, "Check emulator")
if not emulator_ok:
self.fatal('INFRA-ERROR: Unable to start emulator after %d attempts' % max_restarts,
EXIT_STATUS_DICT[TBPL_RETRY])
@ -662,8 +652,8 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# at the end of the job.
logcat_filename = 'logcat-%s.log' % self.emulator["device_id"]
logcat_path = os.path.join(self.abs_dirs['abs_blob_upload_dir'], logcat_filename)
logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S '\
' ExchangeService:S > %s &' % (self.adb_path, self.emulator["device_id"], logcat_path)
logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S ExchangeService:S > %s &' % \
(self.adb_path, self.emulator["device_id"], logcat_path)
self.info(logcat_cmd)
os.system(logcat_cmd)
# Get a post-boot emulator process list for diagnostics
@ -674,8 +664,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"""
Download and extract fennec APK, tests.zip, host utils, and robocop (if required).
"""
super(AndroidEmulatorTest, self).download_and_extract(
suite_categories=self._query_suite_categories())
super(AndroidEmulatorTest, self).download_and_extract(suite_categories=self._query_suite_categories())
dirs = self.query_abs_dirs()
if self.test_suite and self.test_suite.startswith('robocop'):
robocop_url = self.installer_url[:self.installer_url.rfind('/')] + '/robocop.apk'
@ -698,33 +687,29 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
"""
Install APKs on the emulator
"""
install_needed = (not self.test_suite) or \
self.config["suite_definitions"][self.test_suite].get("install")
if install_needed is False:
install_needed = (not self.test_suite) or self.config["suite_definitions"][self.test_suite].get("install")
if install_needed == False:
self.info("Skipping apk installation for %s" % self.test_suite)
return
assert self.installer_path is not None, \
"Either add installer_path to the config or use --installer-path."
self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s',
self.emulator['device_id'],
self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s', self.emulator['device_id'],
'shell', 'getprop', 'ro.build.version.sdk'])
# Install Fennec
install_ok = self._retry(3, 30, self._install_fennec_apk, "Install app APK")
if not install_ok:
self.fatal('INFRA-ERROR: Failed to install %s on %s' %
(self.installer_path, self.emulator["name"]),
EXIT_STATUS_DICT[TBPL_RETRY])
(self.installer_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
# Install Robocop if required
if self.test_suite and self.test_suite.startswith('robocop'):
install_ok = self._retry(3, 30, self._install_robocop_apk, "Install Robocop APK")
if not install_ok:
self.fatal('INFRA-ERROR: Failed to install %s on %s' %
(self.robocop_path, self.emulator["name"]),
EXIT_STATUS_DICT[TBPL_RETRY])
(self.robocop_path, self.emulator["name"]), EXIT_STATUS_DICT[TBPL_RETRY])
self.info("Finished installing apps for %s" % self.emulator["name"])
@ -788,8 +773,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
# Verification has run out of time. That is okay! Stop running
# tests so that a task timeout is not triggered, and so that
# (partial) results are made available in a timely manner.
self.info("TinderboxPrint: Verification too long: "
"Not all tests were verified.<br/>")
self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
# Signal verify time exceeded, to break out of suites and
# suite categories loops also.
return False
@ -831,6 +815,7 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
if len(verify_args) > 0:
self._dump_emulator_log()
@PostScriptAction('run-tests')
def stop_emulator(self, action, success=None):
'''
@ -852,7 +837,6 @@ class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin
self._kill_processes(self.config["emulator_process_name"])
super(AndroidEmulatorTest, self).upload_blobber_files()
if __name__ == '__main__':
emulatorTest = AndroidEmulatorTest()
emulatorTest.run_and_exit()

View File

@ -29,7 +29,6 @@ from mozharness.mozilla.testing.codecoverage import (
code_coverage_config_options
)
class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCoverageMixin):
config_options = [
[["--e10s"],
@ -122,6 +121,7 @@ class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCo
self.register_virtualenv_module('awsy', self.awsy_path)
def populate_webroot(self):
"""Populate the production test slaves' webroots"""
self.info("Downloading pageset with tooltool...")
@ -140,6 +140,7 @@ class AWSY(TestingMixin, MercurialScript, BlobUploadMixin, TooltoolMixin, CodeCo
self.run_command(unzip_cmd, halt_on_failure=True)
self.run_command("ls %s" % page_load_test_dir)
def run_tests(self, args=None, **kw):
'''
AWSY test should be implemented here

View File

@ -63,7 +63,7 @@ class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMi
'submit',
],
config={
'buildbot_json_path': 'buildprops.json'
'buildbot_json_path' : 'buildprops.json'
}
)
self.locales = None
@ -72,21 +72,17 @@ class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMi
def _pre_config_lock(self, rw_config):
super(BouncerSubmitter, self)._pre_config_lock(rw_config)
# override properties from buildbot properties here as defined by taskcluster properties
#override properties from buildbot properties here as defined by taskcluster properties
self.read_buildbot_config()
# check if release promotion is true first before overwriting these properties
#check if release promotion is true first before overwriting these properties
if self.buildbot_config["properties"].get("release_promotion"):
for prop in \
['product', 'version', 'build_number', 'revision',
'bouncer_submitter_config', ]:
for prop in ['product', 'version', 'build_number', 'revision', 'bouncer_submitter_config', ]:
if self.buildbot_config["properties"].get(prop):
self.info("Overriding %s with %s" %
(prop, self.buildbot_config["properties"].get(prop)))
self.info("Overriding %s with %s" % (prop, self.buildbot_config["properties"].get(prop)))
self.config[prop] = self.buildbot_config["properties"].get(prop)
if self.buildbot_config["properties"].get("partial_versions"):
self.config["prev_versions"] = \
self.buildbot_config["properties"].get("partial_versions").split(", ")
self.config["prev_versions"] = self.buildbot_config["properties"].get("partial_versions").split(", ")
for opt in ["version", "credentials_file", "bouncer-api-prefix"]:
if opt not in self.config:

View File

@ -24,11 +24,10 @@ sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.script import BaseScript
# ConfigTest {{{1
class ConfigTest(BaseScript):
config_options = [[
["--test-file", ],
["--test-file",],
{"action": "extend",
"dest": "test_files",
"help": "Specify which config files to test"
@ -129,8 +128,7 @@ class ConfigTest(BaseScript):
self.info("Good.")
filecount[1] += 1
else:
self.add_summary("%s is valid python, "
"but doesn't create a config dictionary." %
self.add_summary("%s is valid python, but doesn't create a config dictionary." %
config_file, level="error")
if filecount[0]:
self.add_summary("%d of %d python config files were good." %
@ -138,7 +136,6 @@ class ConfigTest(BaseScript):
else:
self.add_summary("No python config files to test.")
# __main__ {{{1
if __name__ == '__main__':
config_test = ConfigTest()

View File

@ -34,6 +34,7 @@ from mozharness.mozilla.signing import SigningMixin
from mozharness.mozilla.updates.balrog import BalrogMixin
from mozharness.mozilla.taskcluster_helper import Taskcluster
from mozharness.base.python import VirtualenvMixin
from mozharness.mozilla.mock import ERROR_MSGS
try:
import simplejson as json
@ -617,8 +618,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
# pass through non-interpolables, like booleans
current_repo[key] = value
except KeyError:
self.error('not all the values in "{0}" can be replaced. Check your '
'configuration'.format(value))
self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
raise
repos.append(current_repo)
self.info("repositories: %s" % repos)
@ -971,8 +971,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
# files
# Locale is hardcoded to en-US, for silly reasons
# The Balrog submitter translates this platform into a build target
# via
# https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
self.set_buildbot_property("completeMarSize", self.query_filesize(c_marfile))
self.set_buildbot_property("completeMarHash", self.query_sha512sum(c_marfile))
self.set_buildbot_property("completeMarUrl", c_mar_url)
@ -1098,8 +1097,7 @@ class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
'branch': self.config['branch'],
'appName': self.config['appName'],
'platform': self.config['platform'],
'completeMarUrls': {locale: self._query_complete_mar_url(locale)
for locale in locales},
'completeMarUrls': {locale: self._query_complete_mar_url(locale) for locale in locales},
}
self.info('funsize info: %s' % funsize_info)
self.set_buildbot_property('funsize_info', json.dumps(funsize_info),

View File

@ -98,6 +98,7 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
**buildscript_kwargs
)
def _pre_config_lock(self, rw_config):
self.read_buildbot_config()
if not self.buildbot_config:
@ -105,11 +106,9 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
else:
if self.config.get('require_buildprops', False) is True:
if not self.buildbot_config:
self.fatal("Unable to load properties from file: %s" %
self.config.get('buildbot_json_path'))
self.fatal("Unable to load properties from file: %s" % self.config.get('buildbot_json_path'))
props = self.buildbot_config["properties"]
for prop in ['version', 'build_number', 'revision', 'repo_file',
'repack_manifests_url', 'partner']:
for prop in ['version', 'build_number', 'revision', 'repo_file', 'repack_manifests_url', 'partner']:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
@ -192,7 +191,6 @@ class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
return self.run_command(repack_cmd,
cwd=self.query_abs_dirs()['abs_scripts_dir'])
# main {{{
if __name__ == '__main__':
partner_repacks = DesktopPartnerRepacks()

View File

@ -2,7 +2,6 @@
# ***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
"""desktop_unittest.py
@ -25,7 +24,7 @@ from datetime import datetime, timedelta
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import BaseErrorList
from mozharness.base.log import INFO
from mozharness.base.log import INFO, ERROR
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
@ -40,15 +39,13 @@ from mozharness.mozilla.testing.codecoverage import (
)
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell',
'mozbase', 'mozmill']
SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill']
SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
SUITE_NO_E10S = ['xpcshell']
# DesktopUnittest {{{1
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin,
CodeCoverageMixin):
class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
config_options = [
[['--mochitest-suite', ], {
"action": "extend",
@ -154,8 +151,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) to use "
"the GL compositor."}
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
],
[["--single-stylo-traversal"], {
"action": "store_true",
@ -285,10 +281,8 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'],
'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'],
"jit-test", "jit-test")
dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
@ -341,8 +335,10 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
self.register_virtualenv_module(name='mock')
self.register_virtualenv_module(name='simplejson')
requirements_files = [os.path.join(dirs['abs_test_install_dir'],
'config', 'marionette_requirements.txt')]
requirements_files = [
os.path.join(dirs['abs_test_install_dir'],
'config',
'marionette_requirements.txt')]
if self._query_specified_suites('mochitest') is not None:
# mochitest is the only thing that needs this
@ -395,7 +391,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
str_format_values = {
'binary_path': self.binary_path,
'symbols_path': self._query_symbols_url(),
'abs_work_dir': dirs['abs_work_dir'],
'abs_work_dir' : dirs['abs_work_dir'],
'abs_app_dir': abs_app_dir,
'abs_res_dir': abs_res_dir,
'raw_log_file': raw_log_file,
@ -423,11 +419,11 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
if suite_category == "mochitest":
base_cmd.append('--bisect-chunk=default')
else:
self.warning("--no-random does not currently work with suites other than "
"mochitest.")
self.warning("--no-random does not currently work with suites other than mochitest.")
if c['headless']:
base_cmd.append('--headless')
base_cmd.append('--headless');
# set pluginsPath
abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
@ -518,8 +514,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
return False
if suite_category not in unstructured_flavors:
return True
if not unstructured_flavors.get(suite_category) or \
flavor in unstructured_flavors.get(suite_category):
if not unstructured_flavors.get(suite_category) or flavor in unstructured_flavors.get(suite_category):
return False
return True
@ -565,6 +560,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
"--artifact try syntax flag: {}".format(', '.join(rejected)),
exit_code=self.return_code)
def download_and_extract(self):
"""
download and extract test zip / download installer
@ -702,7 +698,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
env = {}
if isinstance(suites[suite], dict):
options_list = suites[suite].get('options', [])
if self.config.get('verify') is True:
if self.config.get('verify') == True:
tests_list = []
else:
tests_list = suites[suite].get('tests', [])
@ -777,8 +773,7 @@ class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMix
# Verification has run out of time. That is okay! Stop running
# tests so that a task timeout is not triggered, and so that
# (partial) results are made available in a timely manner.
self.info("TinderboxPrint: Verification too long: Not all tests "
"were verified.<br/>")
self.info("TinderboxPrint: Verification too long: Not all tests were verified.<br/>")
# Signal verify time exceeded, to break out of suites and
# suite categories loops also.
return False

View File

@ -78,8 +78,7 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
],
'stage_product': 'firefox',
'platform_supports_post_upload_to_latest': True,
'build_resources_path': \
'%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
'build_resources_path': '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
'nightly_promotion_branches': ['mozilla-central', 'mozilla-aurora'],
# try will overwrite these
@ -206,6 +205,7 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
self.actions = tuple(rw_config.actions)
self.all_actions = tuple(rw_config.all_actions)
def query_abs_dirs(self):
if self.abs_dirs:
return self.abs_dirs
@ -257,7 +257,6 @@ class FxDesktopBuild(BuildScript, TryToolsMixin, object):
import ctypes
ctypes.windll.kernel32.SetErrorMode(0x8001)
if __name__ == '__main__':
fx_desktop_build = FxDesktopBuild()
fx_desktop_build.run_and_exit()

View File

@ -212,13 +212,11 @@ class L10nBumper(VCSScript):
self.mkdir_p(dirs['abs_work_dir'])
self.rmtree(treestatus_json)
self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url],
throw_exception=True)
self.run_command(["curl", "--retry", "4", "-o", treestatus_json, treestatus_url], throw_exception=True)
treestatus = self._read_json(treestatus_json)
if treestatus['result']['status'] != 'closed':
self.info("treestatus is %s - assuming we can land" %
repr(treestatus['result']['status']))
self.info("treestatus is %s - assuming we can land" % repr(treestatus['result']['status']))
return True
return False

View File

@ -7,13 +7,14 @@
import copy
import os
import re
import sys
# load modules from parent dir
sys.path.insert(1, os.path.dirname(sys.path[0]))
from mozharness.base.errors import BaseErrorList, TarErrorList
from mozharness.base.log import INFO
from mozharness.base.log import INFO, ERROR, WARNING
from mozharness.base.script import PreScriptAction
from mozharness.base.transfer import TransferMixin
from mozharness.base.vcs.vcsbase import MercurialScript
@ -33,8 +34,7 @@ from mozharness.mozilla.structuredlog import StructuredOutputParser
# builds is turned off, Bug 1209180.
class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin,
CodeCoverageMixin):
class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin, CodeCoverageMixin):
config_options = [[
["--application"],
{"action": "store",
@ -54,8 +54,7 @@ class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMix
{"action": "store",
"dest": "marionette_address",
"default": None,
"help": "The host:port of the Marionette server running inside Gecko. "
"Unused for emulator testing",
"help": "The host:port of the Marionette server running inside Gecko. Unused for emulator testing",
}
], [
["--emulator"],
@ -155,8 +154,7 @@ class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMix
def _pre_config_lock(self, rw_config):
super(MarionetteTest, self)._pre_config_lock(rw_config)
if not self.config.get('emulator') and not self.config.get('marionette_address'):
self.fatal("You need to specify a --marionette-address for non-emulator tests! "
"(Try --marionette-address localhost:2828 )")
self.fatal("You need to specify a --marionette-address for non-emulator tests! (Try --marionette-address localhost:2828 )")
def query_abs_dirs(self):
if self.abs_dirs:

View File

@ -20,12 +20,14 @@ import os
import pprint
import subprocess
import sys
from getpass import getpass
sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
from mozharness.base.errors import HgErrorList
from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.selfserve import SelfServeMixin
from mozharness.mozilla.updates.balrog import BalrogMixin
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.repo_manipulation import MercurialRepoManipulationMixin
@ -102,16 +104,13 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
"""
message = ""
if self.config['migration_behavior'] not in VALID_MIGRATION_BEHAVIORS:
message += "%s must be one of %s!\n" % (self.config['migration_behavior'],
VALID_MIGRATION_BEHAVIORS)
message += "%s must be one of %s!\n" % (self.config['migration_behavior'], VALID_MIGRATION_BEHAVIORS)
if self.config['migration_behavior'] == 'beta_to_release':
if self.config.get("require_remove_locales") \
and not self.config.get("remove_locales") and 'migrate' in self.actions:
if self.config.get("require_remove_locales") and not self.config.get("remove_locales") and 'migrate' in self.actions:
message += "You must specify --remove-locale!\n"
else:
if self.config.get("require_remove_locales") or self.config.get("remove_locales"):
self.warning("--remove-locale isn't valid unless you're using beta_to_release "
"migration_behavior!\n")
self.warning("--remove-locale isn't valid unless you're using beta_to_release migration_behavior!\n")
if message:
self.fatal(message)
@ -222,8 +221,7 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
dirs = self.query_abs_dirs()
patch_file = os.path.join(dirs['abs_work_dir'], 'patch_file')
self.run_command(
subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags',
'-U9', '>', patch_file]),
subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags', '-U9', '>', patch_file]),
cwd=cwd,
)
self.run_command(
@ -326,8 +324,7 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
"""
dirs = self.query_abs_dirs()
next_mb_version = self.get_version(dirs['abs_to_dir'])[0]
self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "",
use_config_suffix=True)
self.bump_version(dirs['abs_to_dir'], next_mb_version, next_mb_version, "a1", "", use_config_suffix=True)
self.apply_replacements()
# bump m-c version
curr_mc_version = self.get_version(dirs['abs_from_dir'])[0]
@ -341,6 +338,7 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
self.touch_clobber_file(dirs['abs_from_dir'])
self.touch_clobber_file(dirs['abs_to_dir'])
def beta_to_release(self, *args, **kwargs):
""" mozilla-beta -> mozilla-release behavior.
@ -494,11 +492,9 @@ class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
)
# Call beta_to_release etc.
if not hasattr(self, self.config['migration_behavior']):
self.fatal("Don't know how to proceed with migration_behavior %s !" %
self.config['migration_behavior'])
self.fatal("Don't know how to proceed with migration_behavior %s !" % self.config['migration_behavior'])
getattr(self, self.config['migration_behavior'])(end_tag=end_tag)
self.info("Verify the diff, and apply any manual changes, such as disabling features, "
"and --commit-changes")
self.info("Verify the diff, and apply any manual changes, such as disabling features, and --commit-changes")
# __main__ {{{1

View File

@ -183,8 +183,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
rc = self.query_release_config()
repack_env['EN_US_BINARY_URL'] = c['base_en_us_binary_url'] % replace_dict
if 'MOZ_SIGNING_SERVERS' in os.environ:
repack_env['MOZ_SIGN_CMD'] = \
subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
repack_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
self.repack_env = repack_env
return self.repack_env
@ -353,8 +352,10 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
dirs = {
'abs_tools_dir': os.path.join(abs_dirs['base_work_dir'], 'tools'),
'build_dir': os.path.join(abs_dirs['base_work_dir'], 'build'),
'abs_tools_dir':
os.path.join(abs_dirs['base_work_dir'], 'tools'),
'build_dir':
os.path.join(abs_dirs['base_work_dir'], 'build'),
}
abs_dirs.update(dirs)
@ -378,8 +379,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
locales = self.query_locales()
for locale in locales:
self.locales_property.setdefault(locale, "Success")
self.set_buildbot_property("locales", json.dumps(self.locales_property),
write_to_file=True)
self.set_buildbot_property("locales", json.dumps(self.locales_property), write_to_file=True)
# Actions {{{2
def clobber(self):
@ -410,8 +410,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
# pass through non-interpolables, like booleans
current_repo[key] = value
except KeyError:
self.error('not all the values in "{0}" can be replaced. Check '
'your configuration'.format(value))
self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
raise
repos.append(current_repo)
self.info("repositories: %s" % repos)
@ -424,6 +423,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
# list_locales() is defined in LocalesMixin.
def _setup_configure(self, buildid=None):
c = self.config
dirs = self.query_abs_dirs()
env = self.query_repack_env()
make = self.query_exe("make")
@ -510,8 +510,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
env=repack_env,
error_list=MakefileErrorList,
halt_on_failure=False):
self.add_failure(locale, message="%s failed in make installers-%s!" %
(locale, locale))
self.add_failure(locale, message="%s failed in make installers-%s!" % (locale, locale))
continue
success_count += 1
self.summarize_success_count(success_count, total_count,
@ -545,8 +544,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
continue
success_count += 1
self.summarize_success_count(success_count, total_count,
message="Validated signatures on %d of %d "
"binaries successfully.")
message="Validated signatures on %d of %d binaries successfully.")
def taskcluster_upload(self):
auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
@ -635,9 +633,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
continue
total_count += 1
if c.get('base_post_upload_cmd'):
upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % \
{'version': version, 'locale': locale, 'buildnum': str(buildnum),
'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % {'version': version, 'locale': locale, 'buildnum': str(buildnum), 'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
output = self.get_output_from_command_m(
# Ugly hack to avoid |make upload| stderr from showing up
# as get_output_from_command errors
@ -691,7 +687,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
rev = self.vcs_checkout(**repos[0])
self.set_buildbot_property("tools_revision", rev, write_to_file=True)
def query_apkfile_path(self, locale):
def query_apkfile_path(self,locale):
dirs = self.query_abs_dirs()
apkdir = os.path.join(dirs['abs_objdir'], 'dist')
@ -702,8 +698,7 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
if f.endswith(".apk") and re.search(r, f):
apks.append(f)
if len(apks) == 0:
self.fatal("Found no apks files in %s, don't know what to do:\n%s" %
(apkdir, apks), exit_code=1)
self.fatal("Found no apks files in %s, don't know what to do:\n%s" % (apkdir, apks), exit_code=1)
return os.path.join(apkdir, apks[0])
@ -726,13 +721,11 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
for locale in locales:
apk_url = self.query_upload_url(locale)
if not apk_url:
self.add_failure(locale, message="Failed to detect %s url in make upload!" %
(locale))
self.add_failure(locale, message="Failed to detect %s url in make upload!" % (locale))
balrogReady = False
continue
if not balrogReady:
return self.fatal(message="Not all repacks successful, abort without "
"submitting to balrog.")
return self.fatal(message="Not all repacks successful, abort without submitting to balrog")
env = self.query_upload_env()
for locale in locales:
@ -747,15 +740,15 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
self.set_buildbot_property("completeMarUrl", apk_url)
# The Balrog submitter translates this platform into a build target
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23 # noqa
# via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
self.set_buildbot_property(
"platform",
self.buildbot_config["properties"]["platform"])
# TODO: Is there a better way to get this?
#TODO: Is there a better way to get this?
# Set other necessary properties for Balrog submission. None need to
# be passed back to buildbot, so we won't write them to the properties
# files.
#files.
self.set_buildbot_property("locale", locale)
self.set_buildbot_property("appVersion", self.query_version())
@ -785,7 +778,6 @@ class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
if not self.query_is_nightly():
self.submit_balrog_release_pusher(dirs)
# main {{{1
if __name__ == '__main__':
single_locale = MobileSingleLocale()

View File

@ -173,8 +173,7 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
total_count += 1
if not self.download_file(url, file_path):
self.add_failure(platform, locale,
message="Unable to "
"download %(platform)s:%(locale)s installer!")
message="Unable to download %(platform)s:%(locale)s installer!")
else:
success_count += 1
self.summarize_success_count(success_count, total_count,
@ -238,23 +237,19 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
success_count = total_count = 0
for platform in c['platforms']:
for locale in locales:
installer_name = c['installer_base_names'][platform] % \
{'version': rc['version'], 'locale': locale}
installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
if self.query_failure(platform, locale):
self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
continue
original_path = '%s/original/%s/%s/%s' % \
(dirs['abs_work_dir'], platform, locale, installer_name)
original_path = '%s/original/%s/%s/%s' % (dirs['abs_work_dir'], platform, locale, installer_name)
for partner in c['partner_config'].keys():
repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale, installer_name)
repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
total_count += 1
if self._repack_apk(partner, original_path, repack_path):
success_count += 1
else:
self.add_failure(platform, locale,
message="Unable to repack %(platform)s:%(locale)s "
"installer!")
message="Unable to repack %(platform)s:%(locale)s installer!")
self.summarize_success_count(success_count, total_count,
message="Repacked %d of %d installers successfully.")
@ -292,16 +287,13 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
success_count = total_count = 0
for platform in c['platforms']:
for locale in locales:
installer_name = c['installer_base_names'][platform] % \
{'version': rc['version'], 'locale': locale}
installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
if self.query_failure(platform, locale):
self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
continue
for partner in c['partner_config'].keys():
unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale, installer_name)
signed_dir = '%s/partner-repacks/%s/%s/%s' % \
(dirs['abs_work_dir'], partner, platform, locale)
unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
signed_dir = '%s/partner-repacks/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale)
signed_path = "%s/%s" % (signed_dir, installer_name)
total_count += 1
self.info("Signing %s %s." % (platform, locale))
@ -311,8 +303,7 @@ class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
if self.sign_apk(unsigned_path, c['keystore'],
self.store_passphrase, self.key_passphrase,
c['key_alias']) != 0:
self.add_summary("Unable to sign %s:%s apk!" % (platform, locale),
level=FATAL)
self.add_summary("Unable to sign %s:%s apk!" % (platform, locale), level=FATAL)
else:
self.mkdir_p(signed_dir)
if self.align_apk(unsigned_path, signed_path):

View File

@ -122,8 +122,8 @@ class AntivirusScan(BaseScript, VirtualenvMixin):
def get_extract_script(self):
"""Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
so that we can unpack various files for clam to scan them."""
remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py"\
.format(self.config["tools_repo"], self.config["tools_revision"])
remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
self.config["tools_revision"])
self.download_file(remote_file, file_name="extract_and_run_command.py")
def get_files(self):
@ -166,8 +166,7 @@ class AntivirusScan(BaseScript, VirtualenvMixin):
if self._matches_exclude(keyname):
self.debug("Excluding {}".format(keyname))
else:
destination = os.path.join(self.dest_dir,
keyname.replace(candidates_prefix, ''))
destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
dest_dir = os.path.dirname(destination)
if not os.path.isdir(dest_dir):
os.makedirs(dest_dir)

View File

@ -145,8 +145,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
# Default configuration
'config': {
# base index url where to find taskcluster artifact based on taskid
"artifact_base_url": \
'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
"artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
"virtualenv_modules": [
"boto",
"PyYAML",
@ -158,7 +157,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
"virtualenv_path": "venv",
},
}
# todo do excludes need to be configured via command line for specific builds?
#todo do excludes need to be configured via command line for specific builds?
super(BeetMover, self).__init__(**beetmover_kwargs)
c = self.config
@ -167,8 +166,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
self.virtualenv_imports = None
self.bucket = c['bucket']
if not all(aws_creds):
self.fatal('credentials must be passed in env: '
'"AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
self.aws_key_id, self.aws_secret_key = aws_creds
# if excludes is set from command line, use it otherwise use defaults
self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
@ -258,8 +256,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
for locale in self.manifest['mapping']:
for deliverable in self.manifest['mapping'][locale]:
self.log("downloading '{}' deliverable for '{}' locale".format(deliverable,
locale))
self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
source = self.manifest['mapping'][locale][deliverable]['artifact']
self.retry(
self.download_file,
@ -291,8 +288,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
# we have already downloaded the files locally so we can use that version
source = self.manifest['mapping'][locale][deliverable]['artifact']
s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
downloaded_file = os.path.join(dirs['abs_work_dir'],
self.get_filename_from_url(source))
downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
# generate checksums for every uploaded file
beet_file_name = '{}.beet'.format(downloaded_file)
# upload checksums to a separate subdirectory
@ -314,6 +310,7 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
bucket=bucket)
self.log('Success!')
def upload_bit(self, source, s3_key, bucket):
boto = self.virtualenv_imports['boto']
self.info('uploading to s3 with key: {}'.format(s3_key))
@ -327,8 +324,8 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
key = bucket.new_key(s3_key)
# set key value
mime_type, _ = mimetypes.guess_type(source)
self.retry(lambda: key.set_contents_from_filename(
source, headers={'Content-Type': mime_type}), error_level=FATAL),
self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
error_level=FATAL),
else:
if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
# for now, let's halt. If necessary, we can revisit this and allow for overwrites
@ -340,16 +337,14 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
dirs = self.query_abs_dirs()
filenames = [f for f in listdir(dirs['abs_work_dir'])
if isfile(join(dirs['abs_work_dir'], f))]
filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
self.mkdir_p(self.dest_dir)
for file_name in filenames:
if self._matches_exclude(file_name):
self.info("Excluding {} from virus scan".format(file_name))
else:
self.info('Copying {} to {}'.format(file_name, self.dest_dir))
self.copyfile(os.path.join(dirs['abs_work_dir'], file_name),
os.path.join(self.dest_dir, file_name))
self.info('Copying {} to {}'.format(file_name,self.dest_dir))
self.copyfile(os.path.join(dirs['abs_work_dir'], file_name), os.path.join(self.dest_dir,file_name))
self._scan_files()
self.info('Emptying {}'.format(self.dest_dir))
self.rmtree(self.dest_dir)
@ -357,10 +352,9 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
def _scan_files(self):
"""Scan the files we've collected. We do the download and scan concurrently to make
it easier to have a coherent log afterwards. Uses the venv python."""
external_tools_path = os.path.join(os.path.abspath(os.path.dirname(
os.path.dirname(mozharness.__file__))), 'external_tools')
self.run_command([self.query_python_path(), os.path.join(external_tools_path,
'extract_and_run_command.py'),
external_tools_path = os.path.join(
os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
self.run_command([self.query_python_path(), os.path.join(external_tools_path,'extract_and_run_command.py'),
'-j{}'.format(self.config['scan_parallelization']),
'clamscan', '--no-summary', '--', self.dest_dir])
@ -372,7 +366,6 @@ class BeetMover(BaseScript, VirtualenvMixin, object):
mimetypes.init()
map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
if __name__ == '__main__':
beet_mover = BeetMover(pop_aws_auth_from_env())
beet_mover.run_and_exit()

View File

@ -16,31 +16,27 @@ from mozharness.mozilla.signing import SigningMixin
from mozharness.mozilla.buildbot import BuildbotMixin
from mozharness.mozilla.merkle import MerkleTree
class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
config_options = [
[["--stage-product"], {
"dest": "stage_product",
"help": "Name of product used in file server's directory structure, "
"e.g.: firefox, mobile",
"help": "Name of product used in file server's directory structure, eg: firefox, mobile",
}],
[["--version"], {
"dest": "version",
"help": "Version of release, e.g.: 39.0b5",
"help": "Version of release, eg: 39.0b5",
}],
[["--build-number"], {
"dest": "build_number",
"help": "Build number of release, e.g.: 2",
"help": "Build number of release, eg: 2",
}],
[["--bucket-name-prefix"], {
"dest": "bucket_name_prefix",
"help": "Prefix of bucket name, e.g.: net-mozaws-prod-delivery. This will be used to "
"generate a full bucket name (such as "
"net-mozaws-prod-delivery-{firefox,archive}.",
"help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
}],
[["--bucket-name-full"], {
"dest": "bucket_name_full",
"help": "Full bucket name, e.g.: net-mozaws-prod-delivery-firefox",
"help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
}],
[["-j", "--parallelization"], {
"dest": "parallelization",
@ -58,8 +54,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
"dest": "includes",
"default": [],
"action": "append",
"help": "List of patterns to include in big checksums file. See script "
"source for default.",
"help": "List of patterns to include in big checksums file. See script source for default.",
}],
[["--tools-repo"], {
"dest": "tools_repo",
@ -149,8 +144,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
return self.config['bucket_name_full']
suffix = "archive"
# Firefox has a special bucket, per
# https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
# Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
if self.config["stage_product"] == "firefox":
suffix = "firefox"
@ -194,7 +188,6 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
# Temporary holding place for checksums
raw_checksums = []
def worker(item):
self.debug("Downloading {}".format(item))
# TODO: It would be nice to download the associated .asc file
@ -229,8 +222,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
for pattern in self.config["includes"]:
if re.search(pattern, f):
if f in self.checksums:
self.fatal("Found duplicate checksum entry for {}, "
"don't know which one to pick.".format(f))
self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
if not set(self.config["formats"]) <= set(info["hashes"]):
self.fatal("Missing necessary format for file {}".format(f))
self.debug("Adding checksums for file: {}".format(f))
@ -252,8 +244,7 @@ class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, Bu
tree = MerkleTree(hash_fn, data)
head = tree.head().encode("hex")
proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex")
for i in range(len(files))]
proofs = [tree.inclusion_proof(i).to_rfc6962_bis().encode("hex") for i in range(len(files))]
summary = self._get_summary_filename(fmt)
self.info("Creating summary file: {}".format(summary))

View File

@ -185,7 +185,6 @@ class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
revision=self.config["revision"], message=message,
user=self.config["hg_user"], force=True)
# __main__ {{{1
if __name__ == '__main__':
PostReleaseVersionBump().run_and_exit()

View File

@ -166,6 +166,7 @@ class PublishBalrog(MercurialScript, BuildbotMixin):
error_level=FATAL)
# __main__ {{{1
if __name__ == '__main__':
PublishBalrog().run_and_exit()

View File

@ -152,12 +152,11 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
def copy_key():
source_key = bucket.get_key(source)
dest_key = bucket.get_key(destination)
# According to
# http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
# According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
# S3 key MD5 is represented as ETag, except when objects are
# uploaded using multipart method. In this case the object's ETag
# is constructed using its MD5, minus symbol, and number of
# part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823 # noqa
# part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
source_md5 = source_key.etag.split("-")[0]
if dest_key:
dest_md5 = dest_key.etag.split("-")[0]
@ -174,8 +173,7 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
destination, dest_md5))
else:
self.fatal(
"{} already exists with the different content "
"(src ETag: {}, dest ETag: {}), aborting".format(
"{} already exists with the different content (src ETag: {}, dest ETag: {}), aborting".format(
destination, source_key.etag, dest_key.etag))
return retry(copy_key, sleeptime=5, max_sleeptime=60,
@ -197,7 +195,6 @@ class ReleasePusher(BaseScript, VirtualenvMixin):
pool = ThreadPool(self.config["parallelization"])
pool.map(worker, find_release_files())
if __name__ == "__main__":
myScript = ReleasePusher(pop_aws_auth_from_env())
myScript.run_and_exit()

View File

@ -16,8 +16,7 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
config_options = [
[["--chain"], {
"dest": "chain",
"help": "URL from which to download the cert chain to be "
"submitted to CT (in PEM format)"
"help": "URL from which to download the cert chain to be submitted to CT (in PEM format)"
}],
[["--log"], {
"dest": "log",
@ -61,7 +60,7 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
chain = retry(get_chain)
req = {"chain": []}
req = { "chain": [] }
chain = pem.parse(chain)
for i in range(len(chain)):
cert = crypto.load_certificate(crypto.FILETYPE_PEM, str(chain[i]))
@ -77,7 +76,6 @@ class CTSubmitter(BaseScript, VirtualenvMixin):
sct = SignedCertificateTimestamp(resp)
self.write_to_file(self.sct_filename, sct.to_rfc6962())
if __name__ == "__main__":
myScript = CTSubmitter()
myScript.run_and_exit()

View File

@ -94,7 +94,8 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
# TODO: version and appVersion should come from repo
props = self.buildbot_config["properties"]
for prop in ['product', 'version', 'build_number', 'revision',
'appVersion', 'balrog_api_root', "channels"]:
'appVersion', 'balrog_api_root', "channels",
'generate_bz2_blob']:
if props.get(prop):
self.info("Overriding %s with %s" % (prop, props[prop]))
self.config[prop] = props.get(prop)
@ -269,6 +270,10 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
def submit_to_balrog(self):
for _, channel_config in self.query_channel_configs():
self._submit_to_balrog(channel_config)
if 'generate_bz2_blob' in self.config and \
self.config['generate_bz2_blob']:
for _, channel_config in self.query_channel_configs():
self._submit_to_balrog_bz2(channel_config)
def _submit_to_balrog(self, channel_config):
dirs = self.query_abs_dirs()
@ -306,6 +311,59 @@ class UpdatesBumper(MercurialScript, BuildbotMixin,
self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
def _submit_to_balrog_bz2(self, channel_config):
if "bz2_blob_suffix" not in channel_config:
self.info("No need to generate BZ2 blob")
return
dirs = self.query_abs_dirs()
# Use an env variable instead of the command line to avoid issues with blob
# names starting with "-", e.g. "-bz2"
env = {"BALROG_BLOB_SUFFIX": channel_config["bz2_blob_suffix"]}
auth = os.path.join(os.getcwd(), self.config['credentials_file'])
cmd = [
sys.executable,
os.path.join(dirs["abs_tools_dir"],
"scripts/build-promotion/balrog-release-pusher.py")]
cmd.extend([
"--api-root", self.config["balrog_api_root"],
"--download-domain", self.config["download_domain"],
"--archive-domain", self.config["archive_domain"],
"--credentials-file", auth,
"--product", self.config["product"],
"--version", self.config["version"],
"--build-number", str(self.config["build_number"]),
"--app-version", self.config["appVersion"],
"--username", self.config["balrog_username"],
"--complete-mar-filename-pattern",
channel_config["complete_mar_filename_pattern"],
"--complete-mar-bouncer-product-pattern",
channel_config["complete_mar_bouncer_product_pattern"],
"--verbose",
])
for v, build_number in self.query_matching_partials(channel_config):
if v < "56.0":
self.info("Adding %s to partials" % v)
partial = "{version}build{build_number}".format(
version=v, build_number=build_number)
cmd.extend(["--partial-update", partial])
else:
self.info("Not adding %s to partials" % v)
for c in channel_config["channel_names"]:
cmd.extend(["--channel", c])
for r in channel_config["bz2_rules_to_update"]:
cmd.extend(["--rule-to-update", r])
for p in self.config["platforms"]:
cmd.extend(["--platform", p])
if channel_config["requires_mirrors"]:
cmd.append("--requires-mirrors")
if self.config["balrog_use_dummy_suffix"]:
cmd.append("--dummy")
self.retry(lambda: self.run_command(cmd, halt_on_failure=True, env=env))
# __main__ {{{1
if __name__ == '__main__':
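The BALROG_BLOB_SUFFIX environment variable above sidesteps a genuine argparse limitation: an option value that starts with "-" (such as "-bz2") is parsed as another flag unless it is glued to the option with "=", or moved out of argv entirely. A minimal sketch of the difference; "--blob-suffix" is an illustrative flag name, not necessarily what balrog-release-pusher.py exposes:

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--blob-suffix",
                    default=os.environ.get("BALROG_BLOB_SUFFIX", ""))

# parser.parse_args(["--blob-suffix", "-bz2"])   # fails: "-bz2" looks like a flag
args = parser.parse_args(["--blob-suffix=-bz2"])  # works: value glued to the option
print(args.blob_suffix)                           # -bz2

# The script above avoids the question entirely by exporting
# BALROG_BLOB_SUFFIX=-bz2 and letting the callee read it from os.environ.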

View File

@ -151,8 +151,7 @@ class SpidermonkeyBuild(MockMixin,
if self.buildbot_config:
bb_props = [('mock_target', 'mock_target', None),
('hgurl', 'hgurl', None),
('clobberer_url', 'clobberer_url',
'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
('force_clobber', 'force_clobber', None),
('branch', 'blob_upload_branch', None),
]
@ -167,7 +166,7 @@ class SpidermonkeyBuild(MockMixin,
dirs = self.query_abs_dirs()
replacements = self.config['env_replacements'].copy()
for k, v in replacements.items():
for k,v in replacements.items():
replacements[k] = v % dirs
self.env = self.query_env(replace_dict=replacements,
@ -237,16 +236,14 @@ class SpidermonkeyBuild(MockMixin,
def query_compiler_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['compiler_manifest'])
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
def query_sixgill_manifest(self):
dirs = self.query_abs_dirs()
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'],
self.config['sixgill_manifest'])
manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
if os.path.exists(manifest):
return manifest
return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
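The env_replacements loop above relies on old-style %-formatting against a dict of directories, so each configured value can reference any abs_* path by name. A small illustration with made-up keys and paths:

dirs = {"abs_work_dir": "/builds/worker/workspace"}
replacements = {"MOZ_OBJDIR": "%(abs_work_dir)s/obj-analysis"}

for k, v in replacements.items():
    replacements[k] = v % dirs

print(replacements["MOZ_OBJDIR"])  # /builds/worker/workspace/obj-analysis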

View File

@ -18,7 +18,7 @@ GECKO_SRCDIR = os.path.join(os.path.expanduser('~'), 'checkouts', 'gecko')
TELEMETRY_TEST_HOME = os.path.join(GECKO_SRCDIR, 'toolkit', 'components', 'telemetry',
'tests', 'marionette')
from mozharness.base.python import PreScriptAction
from mozharness.base.python import PostScriptRun, PreScriptAction
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.mozilla.testing.testbase import (
TestingMixin,
@ -36,8 +36,7 @@ telemetry_tests_config_options = [
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) "
"to use the GL compositor.",
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor.",
}],
[["--enable-webrender"], {
"action": "store_true",
@ -102,6 +101,7 @@ class TelemetryTests(TestingMixin, VCSToolsScript, CodeCoverageMixin):
@PreScriptAction('create-virtualenv')
def _pre_create_virtualenv(self, action):
dirs = self.query_abs_dirs()
requirements = os.path.join(GECKO_SRCDIR, 'testing',
'config', 'telemetry_tests_requirements.txt')
@ -158,8 +158,7 @@ class TelemetryTests(TestingMixin, VCSToolsScript, CodeCoverageMixin):
strict=False)
# Add the default tests to run
tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test)
for test in self.default_tests]
tests = [os.path.join(dirs['abs_telemetry_dir'], 'tests', test) for test in self.default_tests]
cmd.extend(tests)
# Set further environment settings
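The @PreScriptAction('create-virtualenv') hook above runs just before the named action so the harness can register its requirements file first. As a rough conceptual illustration only (this is a toy stand-in, not the mozharness implementation), such decorators typically just tag the method so the script runner can look hooks up by action name:

def PreScriptAction(action=None):
    """Toy stand-in for a pre-action hook decorator (illustrative only)."""
    def decorator(func):
        func._pre_action = action      # the runner scans methods for this tag
        return func
    return decorator

class ToyScript(object):
    @PreScriptAction('create-virtualenv')
    def _pre_create_virtualenv(self, action):
        print("installing requirements before %s" % action)

    def run_action(self, name):
        for attr in dir(self):
            method = getattr(self, attr)
            if getattr(method, '_pre_action', None) == name:
                method(name)
        print("running %s" % name)

ToyScript().run_action('create-virtualenv')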

View File

@ -5,6 +5,8 @@
# You can obtain one at http://mozilla.org/MPL/2.0/.
# ***** END LICENSE BLOCK *****
import copy
import glob
import json
import os
import sys
@ -15,7 +17,7 @@ from mozharness.base.errors import BaseErrorList
from mozharness.base.script import PreScriptAction
from mozharness.base.vcs.vcsbase import MercurialScript
from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options, TOOLTOOL_PLATFORM_DIR
from mozharness.mozilla.testing.codecoverage import (
CodeCoverageMixin,
code_coverage_config_options
@ -25,7 +27,6 @@ from mozharness.mozilla.testing.errors import HarnessErrorList
from mozharness.mozilla.structuredlog import StructuredOutputParser
from mozharness.base.log import INFO
class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCoverageMixin):
config_options = [
[['--test-type'], {
@ -53,8 +54,7 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
"action": "store_true",
"dest": "allow_software_gl_layers",
"default": False,
"help": "Permits a software GL implementation (such as LLVMPipe) "
"to use the GL compositor."}
"help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
],
[["--enable-webrender"], {
"action": "store_true",
@ -168,7 +168,7 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
def _query_cmd(self):
if not self.binary_path:
self.fatal("Binary path could not be determined")
# And exit
#And exit
c = self.config
dirs = self.query_abs_dirs()
@ -266,8 +266,7 @@ class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin, CodeCovera
if not sys.platform.startswith("darwin"):
font_path = os.path.join(os.path.dirname(self.binary_path), "fonts")
else:
font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir,
"Resources", "res", "fonts")
font_path = os.path.join(os.path.dirname(self.binary_path), os.pardir, "Resources", "res", "fonts")
if not os.path.exists(font_path):
os.makedirs(font_path)
ahem_src = os.path.join(dirs["abs_wpttest_dir"], "tests", "fonts", "Ahem.ttf")
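The font setup above only differs in where the fonts directory lives relative to the binary: next to it on Linux and Windows, inside the .app bundle's Resources/res/fonts on macOS. A small sketch of the same decision, with a made-up binary path:

import os
import sys

def wpt_font_dir(binary_path):
    """Fonts sit beside the binary except on macOS, where they live in the bundle."""
    if not sys.platform.startswith("darwin"):
        return os.path.join(os.path.dirname(binary_path), "fonts")
    return os.path.join(os.path.dirname(binary_path), os.pardir,
                        "Resources", "res", "fonts")

# Illustrative path only; the real value comes from self.binary_path.
print(wpt_font_dir("/builds/firefox/firefox"))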

View File

@ -2,6 +2,10 @@
flake8:
description: Python linter
include:
- build/moz.configure/*.configure
- build/*.py
- configure.py
- config/check_macroassembler_style.py
- config/mozunit.py
- layout/tools/reftest
- python/mach
@ -16,11 +20,8 @@ flake8:
- testing/marionette/client
- testing/marionette/harness
- testing/marionette/puppeteer
- testing/mochitest
- testing/mozbase
- testing/mozharness/mozfile
- testing/mozharness/mozinfo
- testing/mozharness/scripts
- testing/mochitest
- testing/remotecppunittests.py
- testing/runcppunittests.py
- testing/talos/
@ -33,6 +34,7 @@ flake8:
# Excludes should be added to topsrcdir/.flake8 due to a bug in flake8 where
# specifying --exclude causes custom configuration files to be ignored.
exclude: []
extensions: ['py']
# The configure option is used by the build system
extensions: ['configure', 'py']
type: external
payload: python.flake8:lint
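Since the flake8 entry now lists both 'configure' and 'py' extensions, the lint job has to pick up build/moz.configure/*.configure files as well as ordinary Python sources. A rough sketch of that kind of extension filter (the helper below is illustrative, not mozlint's actual code):

import os

EXTENSIONS = ('configure', 'py')

def lintable_files(root):
    """Yield files under `root` whose extension matches the linter config."""
    for dirpath, _dirnames, filenames in os.walk(root):
        for name in filenames:
            if name.rsplit('.', 1)[-1] in EXTENSIONS:
                yield os.path.join(dirpath, name)

# e.g. files = list(lintable_files('build/moz.configure'))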