Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-10-08 19:04:45 +00:00)
Bug 1569059 - Run 'mach bootstrap' with Python 3 r=firefox-build-system-reviewers,mshal,Callek

Differential Revision: https://phabricator.services.mozilla.com/D60898

--HG--
extra : moz-landing-system : lando
parent 217f810381
commit d259591a1c
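Note: every hunk below applies the same migration pattern. Python 2-only idioms (dict.iteritems()/itervalues(), generator .next(), indexing dict.keys(), implicit text/bytes mixing) are replaced with spellings that behave identically under Python 2 and 3, mostly via the six compatibility library. A minimal sketch of the recurring dict-iteration case, with illustrative data:

    import six

    tasks = {'build-linux64': 'opt', 'test-mochitest': 'debug'}
    # six.iteritems() resolves to tasks.iteritems() on Python 2
    # and tasks.items() on Python 3.
    for label, kind in six.iteritems(tasks):
        print(label, kind)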
@@ -17,7 +17,6 @@ py2commands="
     android-emulator
     artifact
     awsy-test
-    bootstrap
     browsertime
     build
     build-backend
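Removing 'bootstrap' from the py2commands list is what flips the interpreter: mach's startup preamble dispatches any command named in that list to Python 2 and everything else to Python 3. A rough Python sketch of that rule (the real mach implements it in its shell preamble; names and interpreter strings here are illustrative):

    PY2_COMMANDS = {"android-emulator", "artifact", "awsy-test"}  # 'bootstrap' no longer listed

    def interpreter_for(command):
        # Hypothetical helper mirroring mach's dispatch rule.
        return "python2.7" if command in PY2_COMMANDS else "python3"

    print(interpreter_for("bootstrap"))  # -> python3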
@@ -917,7 +917,7 @@ class MachCommandBase(MozbuildObject):
         self._ensure_state_subdir_exists('.')
         logfile = self._get_state_filename('last_log.json')
         try:
-            fd = open(logfile, "wb")
+            fd = open(logfile, 'wt')
             self.log_manager.add_json_handler(fd)
         except Exception as e:
             self.log(logging.WARNING, 'mach', {'error': str(e)},

@@ -7,6 +7,7 @@ import logging
 import os
 import copy
 import attr
+import six
 from six import text_type, ensure_text

 from . import filter_tasks

@@ -246,7 +247,7 @@ class TaskGraphGenerator(object):
         self.verify_kinds(kinds)

         edges = set()
-        for kind in kinds.itervalues():
+        for kind in six.itervalues(kinds):
             for dep in kind.config.get('kind-dependencies', []):
                 edges.add((kind.name, dep, 'kind-dependency'))
         kind_graph = Graph(set(kinds), edges)

@@ -280,7 +281,7 @@ class TaskGraphGenerator(object):
         logger.info("Generating full task graph")
         edges = set()
         for t in full_task_set:
-            for depname, dep in t.dependencies.iteritems():
+            for depname, dep in six.iteritems(t.dependencies):
                 edges.add((t.label, dep, depname))

         full_task_graph = TaskGraph(all_tasks,

@@ -307,11 +308,11 @@ class TaskGraphGenerator(object):

         logger.info("Generating target task graph")
         # include all docker-image build tasks here, in case they are needed for a graph morph
-        docker_image_tasks = set(t.label for t in full_task_graph.tasks.itervalues()
+        docker_image_tasks = set(t.label for t in six.itervalues(full_task_graph.tasks)
                                  if t.attributes['kind'] == 'docker-image')
         # include all tasks with `always_target` set
         if parameters["tasks_for"] == "hg-push":
-            always_target_tasks = set(t.label for t in full_task_graph.tasks.itervalues()
+            always_target_tasks = set(t.label for t in six.itervalues(full_task_graph.tasks)
                                       if t.attributes.get('always_target'))
         else:
             always_target_tasks = set()

@@ -354,7 +355,7 @@ class TaskGraphGenerator(object):
     def _run_until(self, name):
         while name not in self._run_results:
             try:
-                k, v = self._run.next()
+                k, v = next(self._run)
             except StopIteration:
                 raise AttributeError("No such run result {}".format(name))
             self._run_results[k] = v

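The `self._run.next()` call was Python 2 only: generators lost their .next() method in Python 3 (it was renamed __next__), while the next() builtin works on both. A quick illustration with a stand-in generator:

    def pairs():
        yield ('full_task_set', set())

    gen = pairs()
    k, v = next(gen)   # portable; gen.next() raises AttributeError on Python 3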
@@ -367,7 +368,7 @@ class TaskGraphGenerator(object):
         parameters_dict = dict(**parameters)
         verify_docs(
             filename="parameters.rst",
-            identifiers=parameters_dict.keys(),
+            identifiers=list(parameters_dict),
             appearing_as="inline-literal"
         )

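Similarly, dict.keys() returns a list on Python 2 but a view object on Python 3, so code that needs a real sequence now builds one explicitly; list(d) is equivalent to list(d.keys()) on both versions. For example:

    params = {'project': 'autoland', 'level': '3'}
    identifiers = list(params)   # ['project', 'level'] in some order, on 2 and 3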
@@ -380,7 +381,7 @@ class TaskGraphGenerator(object):

     def verify_attributes(self, all_tasks):
         attribute_set = set()
-        for label, task in all_tasks.iteritems():
+        for label, task in six.iteritems(all_tasks):
             attribute_set.update(task.attributes.keys())
         verify_docs(
             filename="attributes.rst",

@@ -6,6 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals

 import logging

+import six
+
 from ..util.templates import merge
 from ..util.yaml import load_yaml

@@ -33,7 +35,7 @@ def loader(kind, path, config, params, loaded_tasks):
     """
     def jobs():
         defaults = config.get('job-defaults')
-        for name, job in config.get('jobs', {}).iteritems():
+        for name, job in six.iteritems(config.get('jobs', {})):
             if defaults:
                 job = merge(defaults, job)
             job['job-from'] = 'kind.yml'

@@ -46,7 +48,7 @@ def loader(kind, path, config, params, loaded_tasks):
             if defaults:
                 file_defaults = merge(defaults, file_defaults or {})

-            for name, job in tasks.iteritems():
+            for name, job in six.iteritems(tasks):
                 if file_defaults:
                     job = merge(file_defaults, job)
                 job['job-from'] = filename

@@ -8,6 +8,7 @@ from .graph import Graph
 from .task import Task

 import attr
+import six


 @attr.s(frozen=True)

@@ -39,7 +40,7 @@ class TaskGraph(object):

     def __iter__(self):
         "Iterate over tasks in undefined order"
-        return self.tasks.itervalues()
+        return six.itervalues(self.tasks)

     def to_json(self):
         "Return a JSON-able object representing the task graph, as documented"

@@ -15,6 +15,7 @@ import os
 import re
 import time
 from copy import deepcopy
+import six
 from six import text_type

 import attr

@@ -639,7 +640,8 @@ def build_docker_worker_payload(config, task, task_def):
         suffix = '{}-{}'.format(cache_version, _run_task_suffix())

         if out_of_tree_image:
-            name_hash = hashlib.sha256(out_of_tree_image).hexdigest()
+            name_hash = hashlib.sha256(
+                six.ensure_binary(out_of_tree_image)).hexdigest()
             suffix += name_hash[0:12]

         else:

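hashlib digests operate on bytes, and Python 3 raises TypeError when handed str, whereas Python 2 silently accepted its byte-oriented strings. six.ensure_binary() encodes text to UTF-8 bytes and passes bytes through unchanged, which is why it appears in front of the sha256 calls here and in the cache-digest and hashing hunks further down. A standalone illustration (the image name is made up):

    import hashlib
    import six

    image = u'debian10-amd64-build'   # illustrative value
    digest = hashlib.sha256(six.ensure_binary(image)).hexdigest()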
@@ -673,7 +675,7 @@ def build_docker_worker_payload(config, task, task_def):
     # And send down volumes information to run-task as well.
     if run_task and worker.get('volumes'):
         payload['env']['TASKCLUSTER_VOLUMES'] = ';'.join(
-            sorted(worker['volumes']))
+            [six.ensure_text(s) for s in sorted(worker['volumes'])])

     if payload.get('cache') and skip_untrusted:
         payload['env']['TASKCLUSTER_UNTRUSTED_CACHES'] = '1'

@@ -2050,8 +2052,9 @@ def check_caches_are_volumes(task):
     to be declared as Docker volumes. This check won't catch all offenders.
     But it is better than nothing.
     """
-    volumes = set(task['worker']['volumes'])
-    paths = set(c['mount-point'] for c in task['worker'].get('caches', []))
+    volumes = set(six.ensure_text(s) for s in task['worker']['volumes'])
+    paths = set(six.ensure_text(c['mount-point'])
+                for c in task['worker'].get('caches', []))
     missing = paths - volumes

     if not missing:

@@ -296,7 +296,7 @@ test_description_schema = Schema({
     # in the TEST_VARIANTS object.
     Optional('variants'): optionally_keyed_by(
         'test-platform', 'project',
-        Any(TEST_VARIANTS.keys())),
+        Any(list(TEST_VARIANTS))),

     # Whether to run this task with e10s. If false, run
     # without e10s; if true, run with e10s; if 'both', run one task with and

@@ -6,6 +6,8 @@ from __future__ import absolute_import, print_function, unicode_literals

 import re

+import six
+

 INTEGRATION_PROJECTS = {
     'autoland',

@@ -59,7 +61,7 @@ def attrmatch(attributes, **kwargs):
     must be in the set. A callable is called with the attribute value. If an
     attribute is specified as a keyword argument but not present in the
     attributes, the result is False."""
-    for kwkey, kwval in kwargs.iteritems():
+    for kwkey, kwval in six.iteritems(kwargs):
         if kwkey not in attributes:
             return False
         attval = attributes[kwkey]

@@ -84,7 +86,7 @@ def keymatch(attributes, target):
         return [attributes[target]]

     # regular expression match
-    matches = [v for k, v in attributes.iteritems() if re.match(k + '$', target)]
+    matches = [v for k, v in six.iteritems(attributes) if re.match(k + '$', target)]
     if matches:
         return matches

@@ -7,6 +7,8 @@ from __future__ import absolute_import, print_function, unicode_literals
 import hashlib
 import time

+import six
+

 TARGET_CACHE_INDEX = (
     '{trust_domain}.cache.level-{level}.{type}.{name}.hash.{digest}'

@@ -39,7 +41,8 @@ def add_optimization(config, taskdesc, cache_type, cache_name, digest=None, digest_data=None):
     if (digest is None) == (digest_data is None):
         raise Exception("Must pass exactly one of `digest` and `digest_data`.")
     if digest is None:
-        digest = hashlib.sha256('\n'.join(digest_data)).hexdigest()
+        digest = hashlib.sha256(
+            six.ensure_binary('\n'.join(digest_data))).hexdigest()

     subs = {
         'trust_domain': config.graph_config['trust-domain'],

@@ -5,6 +5,7 @@
 from __future__ import absolute_import, print_function, unicode_literals

 import hashlib
+import io
 import json
 import os
 import re

@@ -233,14 +234,13 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
             archive_files[archive_path] = source_path

     # Parse Dockerfile for special syntax of extra files to include.
-    with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
+    with io.open(os.path.join(context_dir, 'Dockerfile'), 'r') as fh:
         for line in fh:
             if line.startswith('# %ARG'):
                 p = line[len('# %ARG '):].strip()
                 if not args or p not in args:
                     raise Exception('missing argument: {}'.format(p))
-                replace.append((re.compile(r'\${}\b'.format(p)),
-                                args[p].encode('ascii')))
+                replace.append((re.compile(r'\${}\b'.format(p)), args[p]))
                 continue

             for regexp, s in replace:

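Opening the Dockerfile with io.open() in text mode makes each line a str on both interpreters, so line.startswith('# %ARG') stays a str-to-str comparison; with the old 'rb' mode, Python 3 would yield bytes and startswith() against a str literal would raise TypeError. Dropping the .encode('ascii') on args[p] keeps the replacement values text as well, matching the now-text lines (the final join into bytes happens below via six.ensure_binary). A minimal sketch of the text-mode read, assuming a local Dockerfile:

    import io

    with io.open('Dockerfile', 'r') as fh:   # io.open is the builtin open on Python 3
        args = [line[len('# %ARG '):].strip()
                for line in fh if line.startswith('# %ARG')]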
@@ -275,7 +275,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
             archive_files[archive_path] = fs_path

     archive_files[os.path.join(prefix, 'Dockerfile')] = \
-        GeneratedFile(b''.join(content))
+        GeneratedFile(b''.join(six.ensure_binary(s) for s in content))

     writer = HashingWriter(out_file)
     create_tar_gz_from_files(writer, archive_files, '%s.tar.gz' % prefix)

@@ -7,6 +7,8 @@ from mozbuild.util import memoize
 from mozpack.files import FileFinder
 import mozpack.path as mozpath
 import hashlib
+import io
+import six


 @memoize

@@ -15,7 +17,7 @@ def hash_path(path):

     Returns the SHA-256 hash in hex form.
     """
-    with open(path) as fh:
+    with io.open(path, mode='rb') as fh:
         return hashlib.sha256(fh.read()).hexdigest()

@@ -38,8 +40,8 @@ def hash_paths(base_path, patterns):
         else:
             raise Exception('%s did not match anything' % pattern)
     for path in sorted(files.keys()):
-        h.update('{} {}\n'.format(
+        h.update(six.ensure_binary('{} {}\n'.format(
             hash_path(mozpath.abspath(mozpath.join(base_path, path))),
             mozpath.normsep(path)
-        ))
+        )))
     return h.hexdigest()

@@ -34,12 +34,15 @@ def evaluate_keyed_by(value, item_name, attributes):
         default: 12
     """
     while True:
-        if not isinstance(value, dict) or len(value) != 1 or not value.keys()[0].startswith('by-'):
+        if not isinstance(value, dict) or len(value) != 1:
+            return value
+        value_key = next(iter(value))
+        if not value_key.startswith('by-'):
             return value

-        keyed_by = value.keys()[0][3:]  # strip off 'by-' prefix
+        keyed_by = value_key[3:]  # strip off 'by-' prefix
         key = attributes.get(keyed_by)
-        alternatives = value.values()[0]
+        alternatives = next(iter(value.values()))

         if len(alternatives) == 1 and 'default' in alternatives:
             # Error out when only 'default' is specified as only alternatives,

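Splitting the single-key check also fixes two more keys()/values() indexing cases: on Python 3 the views do not support [0], but next(iter(...)) pulls out the lone element on either version. In isolation:

    value = {'by-project': {'autoland': 1, 'default': 2}}
    value_key = next(iter(value))               # 'by-project' on 2 and 3
    alternatives = next(iter(value.values()))   # the keyed alternatives dict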
@@ -128,9 +128,10 @@ def resolve_keyed_by(item, field, item_name, **extra_values):
 # they can be whitelisted here.
 WHITELISTED_SCHEMA_IDENTIFIERS = [
     # upstream-artifacts are handed directly to scriptWorker, which expects interCaps
-    lambda path: "[u'upstream-artifacts']" in path,
-    lambda path: ("[u'test_name']" in path or "[u'json_location']" in path
-                  or "[u'video_location']" in path),
+    lambda path: "[{!r}]".format(u'upstream-artifacts') in path,
+    lambda path: ("[{!r}]".format(u'test_name') in path or
+                  "[{!r}]".format(u'json_location') in path or
+                  "[{!r}]".format(u'video_location') in path),
 ]

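The whitelist matches against the repr of schema path elements, and repr of text differs by version: Python 2 prints u'test_name' while Python 3 prints 'test_name'. Formatting the probe with {!r} at runtime makes it match whichever form the running interpreter produces:

    probe = "[{!r}]".format(u'test_name')
    # Python 2: "[u'test_name']"
    # Python 3: "['test_name']"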
@@ -20,7 +20,7 @@ def merge_to(source, dest):

     for key, value in source.items():
         if isinstance(value, dict) and len(value) == 1 and \
-                value.keys()[0].startswith('by-'):
+                list(value)[0].startswith('by-'):
             # Do not merge by-* values as this is likely to confuse someone
             dest[key] = value
             continue