2017-03-28 14:41:38 +00:00
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
|
|
|
|
sys.path.insert(1, os.path.dirname(sys.path[0])) # noqa - don't warn about imports
|
|
|
|
|
2017-03-28 20:26:56 +00:00
|
|
|
from mozharness.base.log import FATAL
|
2017-03-28 14:41:38 +00:00
|
|
|
from mozharness.base.script import BaseScript
|
|
|
|
|
|
|
|
|
|
|
|
class Repackage(BaseScript):
|
|
|
|
|
|
|
|
def __init__(self, require_config_file=False):
|
|
|
|
script_kwargs = {
|
|
|
|
'all_actions': [
|
|
|
|
"download_input",
|
|
|
|
"setup",
|
|
|
|
"repackage",
|
|
|
|
],
|
|
|
|
}
|
|
|
|
BaseScript.__init__(
|
|
|
|
self,
|
|
|
|
require_config_file=require_config_file,
|
|
|
|
**script_kwargs
|
|
|
|
)
|
|
|
|
|
|
|
|
def download_input(self):
|
|
|
|
config = self.config
|
2017-06-16 19:48:00 +00:00
|
|
|
dirs = self.query_abs_dirs()
|
2017-03-28 14:41:38 +00:00
|
|
|
|
2017-06-16 19:48:00 +00:00
|
|
|
input_home = config['input_home'].format(**dirs)
|
|
|
|
|
|
|
|
for path, url in config["download_config"].items():
|
|
|
|
status = self.download_file(url=url,
|
|
|
|
file_name=path,
|
|
|
|
parent_dir=input_home)
|
|
|
|
if not status:
|
|
|
|
self.fatal("Unable to fetch signed input from %s" % url)
|
2017-03-28 14:41:38 +00:00
|
|
|
|
2017-06-27 21:26:16 +00:00
|
|
|
if 'mar' in path:
|
|
|
|
# Ensure mar is executable
|
|
|
|
self.chmod(os.path.join(input_home, path), 0755)
|
|
|
|
|
2017-03-28 14:41:38 +00:00
|
|
|
def setup(self):
|
|
|
|
self._run_tooltool()
|
2017-07-18 16:13:31 +00:00
|
|
|
if self.config.get("run_configure", True):
|
|
|
|
self._get_mozconfig()
|
|
|
|
self._run_configure()
|
2017-03-28 14:41:38 +00:00
|
|
|
|
|
|
|
def query_abs_dirs(self):
|
|
|
|
if self.abs_dirs:
|
|
|
|
return self.abs_dirs
|
|
|
|
abs_dirs = super(Repackage, self).query_abs_dirs()
|
2017-06-27 21:26:16 +00:00
|
|
|
config = self.config
|
2017-03-28 14:41:38 +00:00
|
|
|
for directory in abs_dirs:
|
|
|
|
value = abs_dirs[directory]
|
|
|
|
abs_dirs[directory] = value
|
2017-07-18 16:13:31 +00:00
|
|
|
|
2017-03-28 14:41:38 +00:00
|
|
|
dirs = {}
|
|
|
|
dirs['abs_tools_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tools')
|
|
|
|
dirs['abs_mozilla_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'src')
|
2017-07-18 15:57:26 +00:00
|
|
|
locale_dir = ''
|
|
|
|
if config.get('locale'):
|
|
|
|
locale_dir = "{}{}".format(os.path.sep, config['locale'])
|
2018-04-18 18:06:17 +00:00
|
|
|
repack_id_dir = ''
|
|
|
|
if config.get('repack_id'):
|
|
|
|
repack_id_dir = "{}{}".format(os.path.sep, config['repack_id'])
|
|
|
|
dirs['output_home'] = config['output_home'].format(
|
|
|
|
locale=locale_dir,
|
|
|
|
repack_id=repack_id_dir,
|
|
|
|
**abs_dirs
|
|
|
|
)
|
2017-03-28 14:41:38 +00:00
|
|
|
for key in dirs.keys():
|
|
|
|
if key not in abs_dirs:
|
|
|
|
abs_dirs[key] = dirs[key]
|
|
|
|
self.abs_dirs = abs_dirs
|
|
|
|
return self.abs_dirs
|
|
|
|
|
|
|
|
def repackage(self):
|
|
|
|
config = self.config
|
|
|
|
dirs = self.query_abs_dirs()
|
2017-06-16 19:48:00 +00:00
|
|
|
|
|
|
|
# Make sure the upload dir is around.
|
|
|
|
self.mkdir_p(dirs['output_home'])
|
|
|
|
|
|
|
|
for repack_config in config["repackage_config"]:
|
|
|
|
command = [sys.executable, 'mach', '--log-no-times', 'repackage'] + \
|
|
|
|
[arg.format(**dirs)
|
|
|
|
for arg in list(repack_config)]
|
|
|
|
self.run_command(
|
|
|
|
command=command,
|
|
|
|
cwd=dirs['abs_mozilla_dir'],
|
|
|
|
halt_on_failure=True,
|
|
|
|
)
|
2017-03-28 14:41:38 +00:00
|
|
|
|
|
|
|
def _run_tooltool(self):
|
|
|
|
config = self.config
|
|
|
|
dirs = self.query_abs_dirs()
|
2017-12-21 22:48:14 +00:00
|
|
|
toolchains = os.environ.get('MOZ_TOOLCHAINS')
|
Bug 1321847 - Allow to override the mozharness tooltool manifest from the environment. r=mshal
The main motivation behind this change is that going towards toolchain
tasks hooked up in the task graph (bug 1313111), we're going to end up
with jobs using both taskcluster toolchain job and tooltool artifacts
for their toolchain needs. With the current setup, this means the
toolchain dependencies will be spread between taskcluster task graph
definition and mozharness configuration.
It also makes things more complex to provide a command that pulls the
right toolchains from both taskcluster and tooltool (bug 1356529),
because one needs to find and parse the mozharness config (which also
happens to be python code that uses platform-specific things, so e.g.
reading windows mozharness config fails on other platforms).
All in all, moving the tooltool manifest path to the taskcluster task
definitions would make things simpler, and would also allow make patches
switching from tooltool to taskcluster artifacts more straightforward to
validate.
But since some build types still run on buildbot, we'll have to keep
part of the current setup using mozharness configs. So we allow to
override the tooltool manifest path from the environment, and we'll rely
on taskcluster task definitions being able to set environment variables.
Actually moving the relevant tooltool manifest paths from mozharness
config to taskcluster task definitions is left for a followup.
Another followup is to move the tooltool manifest paths declared in
some ad-hoc build scripts to taskcluster task definitions as well.
The immediate need for this, though, is to allow to have duplicated jobs
that only differ in their tooltool manifest, without duplicating a
complete mozharness config that will end up stale (the goal being that
really only the tooltool manifest differs, even when the original jobs
change).
--HG--
extra : rebase_source : 3622779926b1b5e86e809c1f6422bd55ef64eed7
2017-06-02 02:28:26 +00:00
|
|
|
manifest_src = os.environ.get('TOOLTOOL_MANIFEST')
|
|
|
|
if not manifest_src:
|
|
|
|
manifest_src = config.get('tooltool_manifest_src')
|
2017-12-21 22:48:14 +00:00
|
|
|
if not manifest_src and not toolchains:
|
|
|
|
return
|
2017-07-18 15:57:26 +00:00
|
|
|
|
2017-03-28 14:41:38 +00:00
|
|
|
cmd = [
|
2017-06-16 19:48:00 +00:00
|
|
|
sys.executable, '-u',
|
|
|
|
os.path.join(dirs['abs_mozilla_dir'], 'mach'),
|
|
|
|
'artifact',
|
|
|
|
'toolchain',
|
|
|
|
'-v',
|
|
|
|
'--retry', '4',
|
2017-07-20 08:56:22 +00:00
|
|
|
'--artifact-manifest',
|
|
|
|
os.path.join(dirs['abs_mozilla_dir'], 'toolchains.json'),
|
2017-03-28 14:41:38 +00:00
|
|
|
]
|
2017-12-26 22:02:21 +00:00
|
|
|
if manifest_src:
|
|
|
|
cmd.extend([
|
|
|
|
'--tooltool-manifest',
|
|
|
|
os.path.join(dirs['abs_mozilla_dir'], manifest_src),
|
|
|
|
'--tooltool-url',
|
|
|
|
config['tooltool_url'],
|
|
|
|
])
|
|
|
|
auth_file = self._get_tooltool_auth_file()
|
|
|
|
if auth_file:
|
|
|
|
cmd.extend(['--authentication-file', auth_file])
|
2017-03-28 14:41:38 +00:00
|
|
|
cache = config.get('tooltool_cache')
|
|
|
|
if cache:
|
2017-06-16 19:48:00 +00:00
|
|
|
cmd.extend(['--cache-dir', cache])
|
2017-12-21 22:48:14 +00:00
|
|
|
if toolchains:
|
|
|
|
cmd.extend(toolchains.split())
|
2017-03-28 14:41:38 +00:00
|
|
|
self.info(str(cmd))
|
|
|
|
self.run_command(cmd, cwd=dirs['abs_mozilla_dir'], halt_on_failure=True)
|
|
|
|
|
2017-06-16 19:48:00 +00:00
|
|
|
def _get_tooltool_auth_file(self):
|
|
|
|
# set the default authentication file based on platform; this
|
|
|
|
# corresponds to where puppet puts the token
|
|
|
|
if 'tooltool_authentication_file' in self.config:
|
|
|
|
fn = self.config['tooltool_authentication_file']
|
|
|
|
elif self._is_windows():
|
|
|
|
fn = r'c:\builds\relengapi.tok'
|
|
|
|
else:
|
|
|
|
fn = '/builds/relengapi.tok'
|
|
|
|
|
|
|
|
# if the file doesn't exist, don't pass it to tooltool (it will just
|
|
|
|
# fail). In taskcluster, this will work OK as the relengapi-proxy will
|
|
|
|
# take care of auth. Everywhere else, we'll get auth failures if
|
|
|
|
# necessary.
|
|
|
|
if os.path.exists(fn):
|
|
|
|
return fn
|
|
|
|
|
2017-03-28 20:26:56 +00:00
|
|
|
def _get_mozconfig(self):
|
|
|
|
"""assign mozconfig."""
|
|
|
|
c = self.config
|
|
|
|
dirs = self.query_abs_dirs()
|
|
|
|
abs_mozconfig_path = ''
|
|
|
|
|
|
|
|
# first determine the mozconfig path
|
|
|
|
if c.get('src_mozconfig'):
|
|
|
|
self.info('Using in-tree mozconfig')
|
|
|
|
abs_mozconfig_path = os.path.join(dirs['abs_mozilla_dir'], c['src_mozconfig'])
|
|
|
|
else:
|
|
|
|
self.fatal("'src_mozconfig' must be in the config "
|
|
|
|
"in order to determine the mozconfig.")
|
|
|
|
|
|
|
|
# print its contents
|
|
|
|
self.read_from_file(abs_mozconfig_path, error_level=FATAL)
|
|
|
|
|
|
|
|
# finally, copy the mozconfig to a path that 'mach build' expects it to be
|
|
|
|
self.copyfile(abs_mozconfig_path, os.path.join(dirs['abs_mozilla_dir'], '.mozconfig'))
|
|
|
|
|
|
|
|
def _run_configure(self):
|
|
|
|
dirs = self.query_abs_dirs()
|
2017-05-04 01:10:33 +00:00
|
|
|
command = [sys.executable, 'mach', '--log-no-times', 'configure']
|
2017-03-28 20:26:56 +00:00
|
|
|
return self.run_command(
|
|
|
|
command=command,
|
|
|
|
cwd=dirs['abs_mozilla_dir'],
|
|
|
|
output_timeout=60*3,
|
|
|
|
halt_on_failure=True,
|
|
|
|
)
|
|
|
|
|
2017-03-28 14:41:38 +00:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Script entry point: instantiate and run the configured actions.
    Repackage().run_and_exit()
|