Bug 1726573 - [taskgraph] Remove remaining uses of 'six', r=taskgraph-reviewers,bhearsum

Differential Revision: https://phabricator.services.mozilla.com/D123235
Andrew Halberstadt 2021-08-24 19:35:47 +00:00
parent d024a31639
commit aac357d350
23 changed files with 73 additions and 117 deletions
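
Every hunk below is an instance of the same mechanical pattern: on Python 3 the six compatibility shims are identity functions or plain aliases, so each call site collapses to its native equivalent. A standalone sketch of the correspondences the patch relies on (illustrative values, not code from the commit):

    import hashlib
    import sys
    from collections.abc import Mapping           # was: six.moves.collections_abc
    from urllib.parse import urlencode, urlsplit  # was: six.moves.urllib.parse

    # six.ensure_text is a no-op for str; six.ensure_binary is str.encode:
    assert isinstance("text", str)
    assert len(hashlib.sha256("data".encode("utf-8")).hexdigest()) == 64
    # six.MAXSIZE is sys.maxsize; the moved imports behave identically:
    assert sys.maxsize > 2 ** 31
    assert issubclass(dict, Mapping)
    assert urlsplit("https://example.com/x").path == "/x"
    assert urlencode({"a": "b"}) == "a=b"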

View File

@@ -9,8 +9,6 @@ import logging
 import os
 import re
 
-import six
-
 from taskgraph.util.taskcluster import list_artifacts, get_artifact, get_task_definition
 from ..util.parameterization import resolve_task_references
 from .registry import register_callback_action
@@ -194,9 +192,9 @@ def create_isolate_failure_tasks(task_definition, failures, level, times):
                 failure_path = "testing/web-platform/tests" + failure_path
             if is_windows:
                 failure_path = "\\".join(failure_path.split("/"))
-            task_definition["payload"]["env"][
-                "MOZHARNESS_TEST_PATHS"
-            ] = six.ensure_text(json.dumps({suite: [failure_path]}, sort_keys=True))
+            task_definition["payload"]["env"]["MOZHARNESS_TEST_PATHS"] = json.dumps(
+                {suite: [failure_path]}, sort_keys=True
+            )
             logger.info(
                 "Creating task for path {} with command {}".format(

View File

@@ -10,7 +10,6 @@ import time
 import sys
 from collections import defaultdict
 
-import six
 from redo import retry
 import yaml
@@ -342,8 +341,8 @@ def get_decision_parameters(graph_config, options):
     # use the pushdate as build_date if given, else use current time
     parameters["build_date"] = parameters["pushdate"] or int(time.time())
     # moz_build_date is the build identifier based on build_date
-    parameters["moz_build_date"] = six.ensure_text(
-        time.strftime("%Y%m%d%H%M%S", time.gmtime(parameters["build_date"]))
-    )
+    parameters["moz_build_date"] = time.strftime(
+        "%Y%m%d%H%M%S", time.gmtime(parameters["build_date"])
+    )
 
     project = parameters["project"]
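
The same reasoning applies to time.strftime, which returns str natively; a sketch with an arbitrary epoch timestamp:

    import time

    build_date = 1629833747  # arbitrary epoch seconds
    moz_build_date = time.strftime("%Y%m%d%H%M%S", time.gmtime(build_date))
    assert isinstance(moz_build_date, str) and len(moz_build_date) == 14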

View File

@@ -5,7 +5,6 @@
 import json
 import os
-import six
 import tarfile
 from io import BytesIO
@@ -31,7 +30,7 @@ def get_image_digest(image_name):
 def load_image_by_name(image_name, tag=None):
-    params = {"level": six.ensure_text(os.environ.get("MOZ_SCM_LEVEL", "3"))}
+    params = {"level": os.environ.get("MOZ_SCM_LEVEL", "3")}
     tasks = load_tasks_for_kind(params, "docker-image")
     task = tasks[f"docker-image-{image_name}"]
     deadline = None

View File

@@ -6,7 +6,6 @@ import logging
 import os
 import copy
 import attr
-from six import ensure_text
 
 from . import filter_tasks
 from .graph import Graph
@@ -134,7 +133,7 @@ class TaskGraphGenerator:
         """
         if root_dir is None:
             root_dir = "taskcluster/ci"
-        self.root_dir = ensure_text(root_dir)
+        self.root_dir = root_dir
         self._parameters = parameters
         self._decision_task_id = decision_task_id
         self._write_artifacts = write_artifacts

View File

@@ -5,8 +5,7 @@
 from fnmatch import fnmatch
 from collections import defaultdict
-
-from six.moves.urllib.parse import urlsplit
+from urllib.parse import urlsplit
 
 from taskgraph.optimize import register_strategy, registry, OptimizationStrategy
 from taskgraph.util.bugbug import (
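
six.moves.urllib.parse was only ever a re-export of the Python 3 standard library, so import swaps like this one change nothing at runtime. A sketch with an illustrative URL, including the path.lstrip idiom used by a later hunk:

    from urllib.parse import urlsplit

    parts = urlsplit("https://hg.mozilla.org/mozilla-central")
    assert parts.netloc == "hg.mozilla.org"
    assert parts.path.lstrip("/") == "mozilla-central"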

View File

@@ -18,8 +18,6 @@ from voluptuous import (
     Schema,
 )
 
-import six
-
 from . import GECKO
 from .util.attributes import release_level
@@ -32,7 +30,7 @@ class ParameterMismatch(Exception):
 @memoize
 def get_head_ref():
-    return six.ensure_text(get_repository_object(GECKO).head_ref)
+    return get_repository_object(GECKO).head_ref
 
 
 def get_contents(path):
@@ -145,7 +143,7 @@ class Parameters(ReadOnlyDict):
         "hg_branch": "default",
         "level": "3",
         "message": "",
-        "moz_build_date": six.ensure_text(now.strftime("%Y%m%d%H%M%S")),
+        "moz_build_date": now.strftime("%Y%m%d%H%M%S"),
         "next_version": None,
         "optimize_strategies": None,
         "optimize_target_tasks": True,
@@ -225,7 +223,7 @@
         """
         Whether this is a staging release or not.
 
-        :return six.text_type: One of "production" or "staging".
+        :return str: One of "production" or "staging".
         """
         return release_level(self["project"])

View File

@@ -5,7 +5,6 @@
 import json
 from pipes import quote as shell_quote
 
-import six
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.scriptworker import get_release_config
 from taskgraph.util.schema import resolve_keyed_by
@@ -95,8 +94,8 @@ def handle_keyed_by(config, jobs):
         if "extra-config" in job["run"]:
             env = job["worker"].setdefault("env", {})
-            env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
-                json.dumps(job["run"]["extra-config"], sort_keys=True)
-            )
+            env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(
+                job["run"]["extra-config"], sort_keys=True
+            )
             del job["run"]["extra-config"]

View File

@@ -8,7 +8,6 @@ import os
 import re
 import json
-import six
 
 import mozpack.path as mozpath
 import taskgraph
 from taskgraph.transforms.base import TransformSequence
@@ -163,9 +162,7 @@ def fill_template(config, tasks):
                 "PROJECT": config.params["project"],
                 "IMAGE_NAME": image_name,
                 "DOCKER_IMAGE_ZSTD_LEVEL": zstd_level,
-                "DOCKER_BUILD_ARGS": {
-                    "task-reference": six.ensure_text(json.dumps(args))
-                },
+                "DOCKER_BUILD_ARGS": {"task-reference": json.dumps(args)},
                 "GECKO_BASE_REPOSITORY": config.params["base_repository"],
                 "GECKO_HEAD_REPOSITORY": config.params["head_repository"],
                 "GECKO_HEAD_REV": config.params["head_rev"],

View File

@@ -13,7 +13,6 @@ run-using handlers in `taskcluster/taskgraph/transforms/job`.
 import copy
 import logging
 import json
-import six
 
 import mozpack.path as mozpath
@@ -355,10 +354,8 @@ def use_fetches(config, jobs):
         env = worker.setdefault("env", {})
         env["MOZ_FETCHES"] = {
-            "task-reference": six.ensure_text(
-                json.dumps(
-                    sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
-                )
-            )
+            "task-reference": json.dumps(
+                sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
+            )
         }
 
         # The path is normalized to an absolute path in run-task

View File

@@ -10,7 +10,6 @@ way, and certainly anything using mozharness should use this approach.
 import json
-import six
 
 from textwrap import dedent
 
 from taskgraph.util.schema import Schema
@@ -184,9 +183,7 @@ def mozharness_on_docker_worker_setup(config, job, taskdesc):
         extra_config = run.pop("extra-config", {})
         extra_config["objdir"] = "obj-build"
-        env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
-            json.dumps(extra_config, sort_keys=True)
-        )
+        env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
 
     if "job-script" in run:
         env["JOB_SCRIPT"] = run["job-script"]
@@ -278,9 +275,7 @@ def mozharness_on_generic_worker(config, job, taskdesc):
         extra_config = run.pop("extra-config", {})
         extra_config["objdir"] = "obj-build"
-        env["EXTRA_MOZHARNESS_CONFIG"] = six.ensure_text(
-            json.dumps(extra_config, sort_keys=True)
-        )
+        env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(extra_config, sort_keys=True)
 
     # The windows generic worker uses batch files to pass environment variables
     # to commands.  Setting a variable to empty in a batch file unsets, so if

View File

@@ -7,7 +7,6 @@ import json
 import os
 import re
-import six
 
 from voluptuous import Required, Optional
 
 from taskgraph.util.taskcluster import get_artifact_url
@@ -198,7 +197,7 @@ def mozharness_test_on_docker(config, job, taskdesc):
         "test_packages_url": test_packages_url(taskdesc),
     }
     env["EXTRA_MOZHARNESS_CONFIG"] = {
-        "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True))
+        "task-reference": json.dumps(extra_config, sort_keys=True)
     }
 
     # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
@@ -216,8 +215,8 @@
     command.extend(mozharness.get("extra-options", []))
 
     if test.get("test-manifests"):
-        env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
-            json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True)
-        )
+        env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+            {test["suite"]: test["test-manifests"]}, sort_keys=True
+        )
 
     # TODO: remove the need for run['chunked']
@@ -349,7 +348,7 @@ def mozharness_test_on_generic_worker(config, job, taskdesc):
         "test_packages_url": test_packages_url(taskdesc),
     }
     env["EXTRA_MOZHARNESS_CONFIG"] = {
-        "task-reference": six.ensure_text(json.dumps(extra_config, sort_keys=True))
+        "task-reference": json.dumps(extra_config, sort_keys=True)
     }
 
     # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
@@ -412,8 +411,8 @@
     mh_command.append("--blob-upload-branch=" + config.params["project"])
 
     if test.get("test-manifests"):
-        env["MOZHARNESS_TEST_PATHS"] = six.ensure_text(
-            json.dumps({test["suite"]: test["test-manifests"]}, sort_keys=True)
-        )
+        env["MOZHARNESS_TEST_PATHS"] = json.dumps(
+            {test["suite"]: test["test-manifests"]}, sort_keys=True
+        )
 
     # TODO: remove the need for run['chunked']

View File

@@ -10,8 +10,6 @@ from collections import defaultdict
 import json
 import logging
 
-import six
-
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.partners import (
     apply_partner_priority,
@@ -114,8 +112,8 @@ def add_command_arguments(config, tasks):
             }
             for upstream_artifact, platform, locale in upstream_artifacts
         ]
-        worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = six.ensure_text(
-            json.dumps(attributions, sort_keys=True)
-        )
+        worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = json.dumps(
+            attributions, sort_keys=True
+        )
 
         worker["artifacts"] = [
             {

View File

@@ -9,8 +9,6 @@ from copy import deepcopy
 from datetime import date, timedelta
 import json
 
-from six import ensure_text
-
 from voluptuous import (
     Any,
     Optional,
@@ -275,8 +273,8 @@ def setup_perftest_extra_options(config, jobs):
 def pass_perftest_options(config, jobs):
     for job in jobs:
         env = job.setdefault("worker", {}).setdefault("env", {})
-        env["PERFTEST_OPTIONS"] = ensure_text(
-            json.dumps(config.params["try_task_config"].get("perftest-options"))
-        )
+        env["PERFTEST_OPTIONS"] = json.dumps(
+            config.params["try_task_config"].get("perftest-options")
+        )
         yield job

View File

@@ -14,7 +14,6 @@ import os
 import re
 import time
 from copy import deepcopy
-import six
 
 import attr
@@ -571,7 +570,7 @@ def build_docker_worker_payload(config, task, task_def):
         if out_of_tree_image:
             name_hash = hashlib.sha256(
-                six.ensure_binary(out_of_tree_image)
+                out_of_tree_image.encode("utf-8")
             ).hexdigest()
             suffix += name_hash[0:12]
@@ -604,9 +603,7 @@ def build_docker_worker_payload(config, task, task_def):
     # And send down volumes information to run-task as well.
     if run_task and worker.get("volumes"):
-        payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join(
-            [six.ensure_text(s) for s in sorted(worker["volumes"])]
-        )
+        payload["env"]["TASKCLUSTER_VOLUMES"] = ";".join(sorted(worker["volumes"]))
 
     if payload.get("cache") and skip_untrusted:
         payload["env"]["TASKCLUSTER_UNTRUSTED_CACHES"] = "1"
@@ -2049,10 +2046,8 @@ def check_caches_are_volumes(task):
     to be declared as Docker volumes. This check won't catch all offenders.
     But it is better than nothing.
     """
-    volumes = {six.ensure_text(s) for s in task["worker"]["volumes"]}
-    paths = {
-        six.ensure_text(c["mount-point"]) for c in task["worker"].get("caches", [])
-    }
+    volumes = {s for s in task["worker"]["volumes"]}
+    paths = {c["mount-point"] for c in task["worker"].get("caches", [])}
     missing = paths - volumes
 
     if not missing:
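
Dropping the ensure_text comprehension around the volume list works because str.join accepts any iterable of str, and the volume paths are already text. A sketch with invented paths:

    volumes = ["/builds/worker/tooltool-cache", "/builds/worker/checkouts"]
    env_value = ";".join(sorted(volumes))
    assert env_value == "/builds/worker/checkouts;/builds/worker/tooltool-cache"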

View File

@@ -5,8 +5,7 @@
 Transform the beetmover task into an actual task description.
 """
 
-import six.moves.urllib.parse as urlparse
+from urllib.parse import urlsplit
 
 from taskgraph.transforms.base import TransformSequence
 from taskgraph.util.schema import resolve_keyed_by
@@ -110,7 +109,7 @@ def add_command(config, tasks):
             "update-verify.cfg",
         ]
-        repo_path = urlparse.urlsplit(get_branch_repo(config)).path.lstrip("/")
+        repo_path = urlsplit(get_branch_repo(config)).path.lstrip("/")
         command.extend(["--repo-path", repo_path])
 
         if release_config.get("partial_versions"):

View File

@@ -162,6 +162,6 @@ def release_level(project):
     """
    Whether this is a staging release or not.
 
-    :return six.text_type: One of "production" or "staging".
+    :return str: One of "production" or "staging".
     """
     return "production" if project in RELEASE_PROJECTS else "staging"

View File

@@ -6,9 +6,6 @@
 import hashlib
 import time
-
-import six
-
 
 TARGET_CACHE_INDEX = "{trust_domain}.cache.level-{level}.{type}.{name}.hash.{digest}"
 EXTRA_CACHE_INDEXES = [
     "{trust_domain}.cache.level-{level}.{type}.{name}.latest",
@@ -44,7 +41,7 @@ def add_optimization(
     if (digest is None) == (digest_data is None):
         raise Exception("Must pass exactly one of `digest` and `digest_data`.")
     if digest is None:
-        digest = hashlib.sha256(six.ensure_binary("\n".join(digest_data))).hexdigest()
+        digest = hashlib.sha256("\n".join(digest_data).encode("utf-8")).hexdigest()
 
     subs = {
         "trust_domain": config.graph_config["trust-domain"],

View File

@@ -9,11 +9,9 @@ import os
 import re
 import requests
 import requests_unixsocket
-import six
 import sys
-from six.moves.urllib.parse import quote, urlencode, urlunparse
-from six.moves.collections_abc import Mapping
+from collections.abc import Mapping
+from urllib.parse import quote, urlencode, urlunparse
 
 from mozbuild.util import memoize
 from mozpack.files import GeneratedFile
@@ -185,7 +183,7 @@ class HashingWriter:
         self._writer.write(buf)
 
     def hexdigest(self):
-        return six.ensure_text(self._hash.hexdigest())
+        return self._hash.hexdigest()
 
 
 def create_context_tar(topsrcdir, context_dir, out_path, image_name, args):
@@ -260,9 +258,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, image_name, args):
             archive_path = os.path.join("topsrcdir", p)
             archive_files[archive_path] = fs_path
 
-    archive_files["Dockerfile"] = GeneratedFile(
-        b"".join(six.ensure_binary(s) for s in content)
-    )
+    archive_files["Dockerfile"] = GeneratedFile("".join(content).encode("utf-8"))
 
     writer = HashingWriter(out_file)
     create_tar_gz_from_files(writer, archive_files, f"{image_name}.tar")
@@ -333,6 +329,6 @@ def parse_volumes(image):
                 "convert to multiple entries"
             )
 
-        volumes |= {six.ensure_text(v) for v in v.split()}
+        volumes |= {v.decode("utf-8") for v in v.split()}
 
     return volumes
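
parse_volumes is the one call site converting in the other direction: the VOLUME lines are read out of a Dockerfile as bytes, so ensure_text becomes an explicit decode. A sketch with an invented VOLUME line:

    line = b"/builds/worker/checkouts /builds/worker/workspace"
    volumes = {v.decode("utf-8") for v in line.split()}
    assert volumes == {"/builds/worker/checkouts", "/builds/worker/workspace"}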

View File

@@ -6,7 +6,6 @@ from mozbuild.util import memoize
 import mozpack.path as mozpath
 from mozversioncontrol import get_repository_object
 import hashlib
-import six
 
 
 @memoize
@@ -46,11 +45,9 @@ def hash_paths(base_path, patterns):
         if path.endswith((".pyc", ".pyd", ".pyo")):
             continue
         h.update(
-            six.ensure_binary(
-                "{} {}\n".format(
-                    hash_path(mozpath.abspath(mozpath.join(base_path, path))),
-                    mozpath.normsep(path),
-                )
-            )
+            "{} {}\n".format(
+                hash_path(mozpath.abspath(mozpath.join(base_path, path))),
+                mozpath.normsep(path),
+            ).encode("utf-8")
         )
     return h.hexdigest()

View File

@@ -6,7 +6,6 @@
 import logging
 import requests
-import six
 import subprocess
 
 from redo import retry
@@ -100,19 +99,17 @@ def get_push_data(repository, project, push_id_start, push_id_end):
 def get_hg_revision_branch(root, revision):
     """Given the parameters for a revision, find the hg_branch (aka
     relbranch) of the revision."""
-    return six.ensure_text(
-        subprocess.check_output(
-            [
-                "hg",
-                "identify",
-                "-T",
-                "{branch}",
-                "--rev",
-                revision,
-            ],
-            cwd=root,
-            universal_newlines=True,
-        )
-    )
+    return subprocess.check_output(
+        [
+            "hg",
+            "identify",
+            "-T",
+            "{branch}",
+            "--rev",
+            revision,
+        ],
+        cwd=root,
+        universal_newlines=True,
+    )
@@ -120,12 +117,12 @@
 # revision indicated by GECKO_HEAD_REF, so all that remains is to see what the
 # current revision is. Mercurial refers to that as `.`.
 def get_hg_commit_message(root):
-    return six.ensure_text(
-        subprocess.check_output(["hg", "log", "-r", ".", "-T", "{desc}"], cwd=root)
-    )
+    return subprocess.check_output(
+        ["hg", "log", "-r", ".", "-T", "{desc}"], cwd=root, universal_newlines=True
+    )
 
 
 def calculate_head_rev(root):
-    return six.ensure_text(
-        subprocess.check_output(["hg", "log", "-r", ".", "-T", "{node}"], cwd=root)
-    )
+    return subprocess.check_output(
+        ["hg", "log", "-r", ".", "-T", "{node}"], cwd=root, universal_newlines=True
+    )
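
Rather than decoding check_output's bytes afterwards, the hg helpers now let subprocess do it: with universal_newlines=True (aliased as text=True since Python 3.7), check_output returns str directly. A sketch that assumes it runs inside a Mercurial checkout:

    import subprocess

    node = subprocess.check_output(
        ["hg", "log", "-r", ".", "-T", "{node}"],
        cwd=".",  # assumption: the current directory is an hg checkout
        universal_newlines=True,
    )
    assert isinstance(node, str)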

View File

@@ -3,18 +3,19 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
-from copy import deepcopy
 import json
 import logging
 import os
-from redo import retry
-import requests
 import xml.etree.ElementTree as ET
+from copy import deepcopy
+from urllib.parse import urlencode
+
+import requests
+import yaml
+from redo import retry
 
 from taskgraph.util.attributes import release_level
 from taskgraph.util.schema import resolve_keyed_by
-import six
-import yaml
 
 # Suppress chatty requests logging
 logging.getLogger("requests").setLevel(logging.WARNING)
@@ -551,5 +552,5 @@ def generate_attribution_code(defaults, partner):
     if partner.get("experiment"):
         params["experiment"] = partner["experiment"]
-    code = six.moves.urllib.parse.urlencode(params)
+    code = urlencode(params)
     return code
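
urlencode moved verbatim from six.moves.urllib.parse to urllib.parse; the attribution parameters below are invented:

    from urllib.parse import urlencode

    params = {"campaign": "some-partner", "experiment": "test-42"}
    assert urlencode(params) == "campaign=some-partner&experiment=test-42"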

View File

@@ -7,7 +7,6 @@ import os
 import datetime
 import functools
 import requests
-import six
 import logging
 import taskcluster_urls as liburls
 from mozbuild.util import memoize
@@ -38,7 +37,7 @@ def get_root_url(use_proxy):
     is not set."""
     if use_proxy:
         try:
-            return six.ensure_text(os.environ["TASKCLUSTER_PROXY_URL"])
+            return os.environ["TASKCLUSTER_PROXY_URL"]
         except KeyError:
             if "TASK_ID" not in os.environ:
                 raise RuntimeError(
@@ -61,7 +60,7 @@
                 " with taskcluster-proxy" if "TASKCLUSTER_PROXY_URL" in os.environ else "",
             )
         )
-    return six.ensure_text(os.environ["TASKCLUSTER_ROOT_URL"])
+    return os.environ["TASKCLUSTER_ROOT_URL"]
 
 
 def requests_retry_session(
@@ -129,7 +128,7 @@ def get_artifact_url(task_id, path, use_proxy=False):
     artifact_tmpl = liburls.api(
         get_root_url(False), "queue", "v1", "task/{}/artifacts/{}"
     )
-    data = six.ensure_text(artifact_tmpl.format(task_id, path))
+    data = artifact_tmpl.format(task_id, path)
     if use_proxy:
         # Until Bug 1405889 is deployed, we can't download directly
         # from the taskcluster-proxy. Work around by using the /bewit
@@ -141,7 +140,7 @@
             data=data,
             allow_redirects=False,
         )
-        return six.ensure_text(response.text)
+        return response.text
     return data
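
os.environ lookups and requests' response.text are both guaranteed str on Python 3, so each ensure_text in this file was an identity call. A sketch (the fallback URL is invented):

    import os

    root_url = os.environ.get("TASKCLUSTER_ROOT_URL", "https://tc.example.invalid")
    assert isinstance(root_url, str)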

View File

@@ -6,9 +6,9 @@
 import logging
 import re
 import os
+import sys
 
 import attr
-import six
 
 from .. import GECKO
 from .treeherder import join_symbol
@@ -238,12 +238,12 @@ def verify_dependency_tiers(task, taskgraph, scratch_pad, graph_config, parameters):
     tiers = scratch_pad
     if task is not None:
         tiers[task.label] = (
-            task.task.get("extra", {}).get("treeherder", {}).get("tier", six.MAXSIZE)
+            task.task.get("extra", {}).get("treeherder", {}).get("tier", sys.maxsize)
         )
     else:
 
         def printable_tier(tier):
-            if tier == six.MAXSIZE:
+            if tier == sys.maxsize:
                 return "unknown"
 
             return tier
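
Finally, six.MAXSIZE was defined as sys.maxsize, so a task without an explicit treeherder tier still sorts after every real tier. A sketch with a stubbed-out task:

    import sys

    task = {"extra": {}}  # stub task with no treeherder tier set
    tier = task.get("extra", {}).get("treeherder", {}).get("tier", sys.maxsize)
    assert tier == sys.maxsize  # printable_tier() renders this as "unknown"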