Backed out 3 changesets (bug 1850045) for causing decision task bustages CLOSED TREE

Backed out changeset b5870683550d (bug 1850045)
Backed out changeset 4ecd1e0ccff8 (bug 1850045)
Backed out changeset 74ec20ac8f53 (bug 1850045)
Sandor Molnar 2023-09-06 16:43:22 +03:00
parent 8c643cc2fa
commit 9844277299
63 changed files with 329 additions and 482 deletions

View File

@@ -7,7 +7,6 @@ loader: gecko_taskgraph.loader.transform:loader
transforms:
- gecko_taskgraph.transforms.split_by_locale:transforms
- gecko_taskgraph.transforms.attribution:transforms
- taskgraph.transforms.task_context
- gecko_taskgraph.transforms.job:transforms
- gecko_taskgraph.transforms.task:transforms
@@ -38,10 +37,6 @@ job-defaults:
fetches:
repackage-signing-l10n:
- "{locale}/target.installer.exe"
task-context:
from-file: browser/installer/attribution.yml
substitution-fields:
- run.command
worker:
artifacts:
- name: public/build
@@ -67,6 +62,8 @@ job-defaults:
# attribution of other files, which they can append themselves.
- --input
- /builds/worker/fetches/target.installer.exe
command-context:
from-file: browser/installer/attribution.yml
jobs:
win32-devedition/opt:
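
For orientation: this file and the next back out bug 1850045's move to the generic `taskgraph.transforms.task_context` transform and restore the older `command-context` mechanism in the attribution kinds. Roughly (a sketch assembled from the hunks above, not a complete kind definition):

    # backed out (bug 1850045):
    task-context:
        from-file: browser/installer/attribution.yml
        substitution-fields:
            - run.command

    # restored by this backout:
    command-context:
        from-file: browser/installer/attribution.yml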

View File

@@ -6,7 +6,6 @@ loader: gecko_taskgraph.loader.transform:loader
transforms:
- gecko_taskgraph.transforms.attribution:transforms
- taskgraph.transforms.task_context
- gecko_taskgraph.transforms.job:transforms
- gecko_taskgraph.transforms.task:transforms
@@ -30,10 +29,6 @@ job-defaults:
symbol: Attr
kind: other
tier: 1
task-context:
from-file: browser/installer/attribution.yml
substitution-fields:
- run.command
worker:
artifacts:
- name: public/build
@@ -59,6 +54,8 @@ job-defaults:
# attribution of other files, which they can append themselves.
- --input
- /builds/worker/fetches/target.installer.exe
command-context:
from-file: browser/installer/attribution.yml
jobs:
win32-devedition/opt:

View File

@@ -21,7 +21,6 @@ only-for-build-platforms:
jobs:
beetmover-apt:
from-deps:
group-by: single-with-filters
from-deps: {}
run-on-releases:
- nightly
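
The same pattern repeats across the beetmover, signing, and repackage kinds that follow: the explicit `group-by: single-with-filters` configuration from bug 1850045 is backed out in favor of the bare default, which in the restored taskgraph is the `single` grouping (see the `from_deps.py` and grouping hunks near the end):

    # backed out:
    from-deps:
        group-by: single-with-filters

    # restored:
    from-deps: {}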

View File

@@ -19,8 +19,7 @@ only-for-attributes:
jobs:
beetmover-checksums:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
attributes:
artifact_prefix: public

View File

@@ -38,8 +38,7 @@ not-for-build-platforms:
jobs:
beetmover-geckoview:
from-deps:
group-by: single-with-filters
from-deps: {}
attributes:
artifact_map: taskcluster/gecko_taskgraph/manifests/fennec_geckoview.yml
run-on-projects: ['mozilla-release']

View File

@@ -16,8 +16,7 @@ kind-dependencies:
jobs:
beetmover-source:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
attributes:
artifact_map: taskcluster/gecko_taskgraph/manifests/source_files.yml

View File

@@ -20,7 +20,6 @@ only-for-attributes:
jobs:
build-mac-notarization:
from-deps:
group-by: single-with-filters
copy-attributes: true
treeherder:
symbol: BMN

View File

@@ -25,8 +25,7 @@ only-for-build-platforms:
jobs:
build-mac-signing:
from-deps:
group-by: single-with-filters
from-deps: {}
treeherder:
symbol: BMS
enable-signing-routes: false

View File

@@ -6,7 +6,6 @@ loader: gecko_taskgraph.loader.transform:loader
transforms:
- gecko_taskgraph.transforms.source_test:transforms
- taskgraph.transforms.task_context
- gecko_taskgraph.transforms.job:transforms
- gecko_taskgraph.transforms.task:transforms
@@ -24,9 +23,6 @@ job-defaults:
treeherder:
kind: test
tier: 3
task-context:
from-object: {}
substitution-fields: []
if-dependencies: [build]
jobs:

View File

@@ -21,8 +21,7 @@ jobs:
fxrecord:
name: notify-fxrecord-failure
description: "Desktop Startup Visual Metrics"
from-deps:
group-by: single-with-filters
from-deps: {}
run-on-projects:
- "mozilla-central"
worker-type: performance-hardware/gecko-t-fxrecorder

View File

@@ -13,8 +13,9 @@ transforms:
kind-dependencies:
- toolchain
only-for-attributes:
- geckodriver
jobs:
geckodriver-signing:
from-deps:
with-attributes:
geckodriver: [true]
from-deps: {}

View File

@@ -18,8 +18,7 @@ only-for-build-platforms:
jobs:
mar-signing-autograph-stage:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: null
treeherder-group: ms-stage
treeherder:

View File

@@ -30,8 +30,7 @@ only-for-build-platforms:
jobs:
mar-signing-l10n:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
treeherder-group: ms
description-suffix: 'mar signing'

View File

@@ -31,8 +31,7 @@ only-for-build-platforms:
jobs:
mar-signing:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
treeherder-group: ms
description-suffix: 'mar signing'

View File

@@ -15,8 +15,7 @@ kind-dependencies:
jobs:
partials-signing:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
treeherder-group: ps
description-suffix: 'partial signing'

View File

@@ -34,6 +34,5 @@ only-for-build-platforms:
jobs:
partials:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote

View File

@@ -11,7 +11,6 @@ kind-dependencies:
transforms:
- gecko_taskgraph.transforms.perftest:transforms
- gecko_taskgraph.transforms.source_test:transforms
- taskgraph.transforms.task_context
- gecko_taskgraph.transforms.job:transforms
- gecko_taskgraph.transforms.task:transforms
@@ -29,9 +28,6 @@ job-defaults:
treeherder:
kind: other
tier: 3
task-context:
from-object: {}
substitution-fields: []
worker:
taskcluster-proxy: true
max-run-time: 10800

View File

@@ -15,8 +15,7 @@ kind-dependencies:
jobs:
release-beetmover-signed-langpacks-checksums:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
attributes:
artifact_prefix: public

View File

@@ -20,8 +20,7 @@ only-for-build-platforms:
jobs:
release-eme-free-repack-beetmover:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
partner-bucket-scope:
by-release-level:

View File

@@ -20,5 +20,4 @@ only-for-build-platforms:
jobs:
release-eme-free-repack-repackage-signing:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -26,8 +26,7 @@ only-for-build-platforms:
jobs:
release-eme-free-repack-repackage:
from-deps:
group-by: single-with-filters
from-deps: {}
upstream-mac-kind:
by-build-type:
debug: release-eme-free-repack-mac-signing

View File

@@ -24,8 +24,7 @@ only-for-build-platforms:
jobs:
release-partner-repack-beetmover:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
partner-bucket-scope:
by-release-level:

View File

@@ -25,5 +25,4 @@ only-for-build-platforms:
jobs:
release-partner-repack-repackage-signing:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -27,8 +27,7 @@ only-for-build-platforms:
jobs:
release-partner-repack-repackage:
from-deps:
group-by: single-with-filters
from-deps: {}
upstream-mac-kind:
by-build-type:
debug: release-partner-repack-mac-signing

View File

@@ -23,8 +23,7 @@ only-for-build-platforms:
jobs:
repackage-deb-l10n:
from-deps:
group-by: single-with-filters
from-deps: {}
worker-type: b-linux-gcp
worker:
docker-image: {"in-tree": "debian12-repackage"}

View File

@@ -28,8 +28,7 @@ only-for-attributes:
jobs:
repackage-deb:
from-deps:
group-by: single-with-filters
from-deps: {}
shipping-phase: promote
worker-type: b-linux-gcp
worker:

View File

@@ -35,8 +35,7 @@ only-for-build-platforms:
jobs:
repackage-l10n:
from-deps:
group-by: single-with-filters
from-deps: {}
upstream-mac-kind:
by-build-type:
debug: shippable-l10n-mac-signing

View File

@@ -25,8 +25,7 @@ only-for-build-platforms:
jobs:
repackage-msi:
worker-type: 'b-win2022'
from-deps:
group-by: single-with-filters
from-deps: {}
mozharness:
use-magic-mh-args: false
config:

View File

@@ -27,5 +27,4 @@ only-for-build-platforms:
jobs:
repackage-signing-l10n:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -23,5 +23,4 @@ only-for-build-platforms:
jobs:
repackage-signing-msi:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -15,5 +15,4 @@ kind-dependencies:
jobs:
repackage-signing-msix:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -25,5 +25,4 @@ only-for-build-platforms:
jobs:
repackage-signing:
from-deps:
group-by: single-with-filters
from-deps: {}

View File

@@ -43,8 +43,7 @@ only-for-build-platforms:
jobs:
repackage:
from-deps:
group-by: single-with-filters
from-deps: {}
upstream-mac-kind:
by-build-type:
debug: build-mac-signing

View File

@@ -7,7 +7,6 @@ loader: gecko_taskgraph.loader.transform:loader
transforms:
- gecko_taskgraph.transforms.try_job:transforms
- gecko_taskgraph.transforms.source_test:transforms
- taskgraph.transforms.task_context
- gecko_taskgraph.transforms.release_notifications:transforms
- gecko_taskgraph.transforms.job:transforms
- gecko_taskgraph.transforms.task:transforms
@@ -39,6 +38,3 @@ jobs-from:
job-defaults:
attributes:
retrigger: true
task-context:
from-object: {}
substitution-fields: []

View File

@@ -21,6 +21,5 @@ kind-dependencies:
jobs:
upload-generated-sources-dummy:
description: Dummy task to pull in mac x64 and aarch64 upload-generated-symbols tasks
from-deps:
group-by: single-with-filters
from-deps: {}
worker-type: succeed

View File

@@ -22,6 +22,5 @@ jobs:
upload-symbols-dummy:
description: Dummy task to pull in mac x64 and aarch64 upload-symbols tasks
from-deps:
group-by: single-with-filters
unique-kinds: false
worker-type: succeed

View File

@@ -107,5 +107,44 @@ def test_worker_caches(task, transform):
validate_schema(partial_schema, taskdesc["worker"][key], "validation error")
@pytest.mark.parametrize(
"workerfn", [fn for fn, *_ in job.registry["run-task"].values()]
)
@pytest.mark.parametrize(
"task",
(
{
"worker-type": "b-linux",
"run": {
"checkout": True,
"comm-checkout": False,
"command": "echo '{output}'",
"command-context": {"output": "hello", "extra": None},
"run-as-root": False,
"sparse-profile": False,
"tooltool-downloads": False,
},
},
),
)
def test_run_task_command_context(task, transform, workerfn):
config, job_, taskdesc, _ = transform(task)
job_ = deepcopy(job_)
def assert_cmd(expected):
cmd = taskdesc["worker"]["command"]
while isinstance(cmd, list):
cmd = cmd[-1]
assert cmd == expected
workerfn(config, job_, taskdesc)
assert_cmd("echo 'hello'")
job_copy = job_.copy()
del job_copy["run"]["command-context"]
workerfn(config, job_copy, taskdesc)
assert_cmd("echo '{output}'")
if __name__ == "__main__":
main()

View File

@@ -27,6 +27,10 @@ mach_schema = Schema(
Required("comm-checkout"): bool,
# Base work directory used to set up the task.
Optional("workdir"): str,
# Context to substitute into the command using format string
# substitution (e.g {value}). This is useful if certain aspects of the
# command need to be generated in transforms.
Optional("command-context"): dict,
}
)

View File

@@ -12,8 +12,9 @@ from mozbuild.util import memoize
from mozpack import path
from taskgraph.util.schema import Schema
from taskgraph.util.yaml import load_yaml
from voluptuous import Any, Optional, Required
from voluptuous import Any, Extra, Optional, Required
from gecko_taskgraph import GECKO
from gecko_taskgraph.transforms.job import run_job_using
from gecko_taskgraph.transforms.job.common import add_tooltool, support_vcs_checkout
from gecko_taskgraph.transforms.task import taskref_or_string
@@ -46,6 +47,16 @@ run_task_schema = Schema(
# checkout arguments. If a list, it will be passed directly; otherwise
# it will be included in a single argument to `bash -cx`.
Required("command"): Any([taskref_or_string], taskref_or_string),
# Context to substitute into the command using format string
# substitution (e.g {value}). This is useful if certain aspects of the
# command need to be generated in transforms.
Optional("command-context"): {
# If present, loads a set of context variables from an unnested yaml
# file. If a value is present in both the provided file and directly
# in command-context, the latter will take priority.
Optional("from-file"): str,
Extra: object,
},
# Base work directory used to set up the task.
Optional("workdir"): str,
# If not false, tooltool downloads will be enabled via relengAPIProxy
@@ -101,6 +112,25 @@ def script_url(config, script):
return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
def substitute_command_context(command_context, command):
from_file = command_context.pop("from-file", None)
full_context = {}
if from_file:
full_context = load_yaml(os.path.join(GECKO, from_file))
else:
full_context = {}
full_context.update(command_context)
if isinstance(command, list):
for i in range(len(command)):
command[i] = command[i].format(**full_context)
else:
command = command.format(**full_context)
return command
@run_job_using(
"docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
)
@@ -124,7 +154,12 @@ def docker_worker_run_task(config, job, taskdesc):
}
)
run_command = run["command"]
if run.get("command-context"):
run_command = substitute_command_context(
run.get("command-context"), run["command"]
)
else:
run_command = run["command"]
run_cwd = run.get("cwd")
if run_cwd and run["checkout"]:
@@ -228,6 +263,11 @@ def generic_worker_run_task(config, job, taskdesc):
run_command = f'"{run_command}"'
run_command = ["bash", "-cx", run_command]
if run.get("command-context"):
run_command = substitute_command_context(
run.get("command-context"), run_command
)
if run["comm-checkout"]:
command.append(
"--comm-checkout={}/comm".format(taskdesc["worker"]["env"]["GECKO_PATH"])

View File

@@ -254,13 +254,8 @@ def set_base_revision_in_tgdiff(config, jobs):
yield job
continue
job["task-context"] = {
"from-object": {
"base_rev": data["changesets"][0]["parents"][0],
},
"substitution-fields": [
"run.command",
],
job["run"]["command-context"] = {
"base_rev": data["changesets"][0]["parents"][0]
}
yield job

View File

@@ -32,7 +32,7 @@ def skip_only_or_not(config, task):
return False
@group_by("single-with-filters")
@group_by("single")
def single_grouping(config, tasks):
for task in tasks:
if skip_only_or_not(config.config, task):

third_party/python/poetry.lock
View File

@@ -1164,14 +1164,14 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "
[[package]]
name = "taskcluster-taskgraph"
version = "6.2.1"
version = "5.7.0"
description = "Build taskcluster taskgraphs"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "taskcluster-taskgraph-6.2.1.tar.gz", hash = "sha256:30b9f1ace27af870c77a0989e647b6ae9469a58acc6045f4c80e130b4e9ffc8a"},
{file = "taskcluster_taskgraph-6.2.1-py3-none-any.whl", hash = "sha256:04d794917af42e08ff18fda897d370791d193fe1c29194ea49fe727e6b25ddc8"},
{file = "taskcluster-taskgraph-5.7.0.tar.gz", hash = "sha256:fb1e2b3d45a8dac713932d7849b919c7bf5d4c553a7ea791a8c3f3b222e845ae"},
{file = "taskcluster_taskgraph-5.7.0-py3-none-any.whl", hash = "sha256:a25bb0b68b9460902025ebd78aa5f00fe6674d07d0b1ccfa541a486d0112a82a"},
]
[package.dependencies]
@@ -1182,6 +1182,7 @@ mozilla-repo-urls = "*"
PyYAML = ">=5.3.1"
redo = ">=2.0"
requests = ">=2.25"
requests-unixsocket = ">=0.2"
slugid = ">=2.0"
taskcluster-urls = ">=11.0"
voluptuous = ">=0.12.1"
@@ -1414,4 +1415,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
[metadata]
lock-version = "2.0"
python-versions = "^3.7"
content-hash = "ad71620e96d879f75e91e2ffd6508dcdb48967c70da59492d8307f0b3aaf62aa"
content-hash = "195fb2c45ba5de3c5c98f5d2fa44b5ce64fd3dd45c324d528477637245413d0f"

View File

@@ -42,7 +42,7 @@ setuptools==51.2.0
six==1.13.0
slugid==2.0.0
taskcluster==44.2.2
taskcluster-taskgraph==6.2.1
taskcluster-taskgraph==5.7.0
taskcluster-urls==13.0.1
toml==0.10.2
tomlkit==0.11.8

View File

@@ -357,9 +357,9 @@ six==1.13.0 ; python_version >= "3.7" and python_version < "4.0" \
slugid==2.0.0 ; python_version >= "3.7" and python_version < "4.0" \
--hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
--hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
taskcluster-taskgraph==6.2.1 ; python_version >= "3.7" and python_version < "4.0" \
--hash=sha256:04d794917af42e08ff18fda897d370791d193fe1c29194ea49fe727e6b25ddc8 \
--hash=sha256:30b9f1ace27af870c77a0989e647b6ae9469a58acc6045f4c80e130b4e9ffc8a
taskcluster-taskgraph==5.7.0 ; python_version >= "3.7" and python_version < "4.0" \
--hash=sha256:a25bb0b68b9460902025ebd78aa5f00fe6674d07d0b1ccfa541a486d0112a82a \
--hash=sha256:fb1e2b3d45a8dac713932d7849b919c7bf5d4c553a7ea791a8c3f3b222e845ae
taskcluster-urls==13.0.1 ; python_version >= "3.7" and python_version < "4.0" \
--hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
--hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \

View File

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: taskcluster-taskgraph
Version: 6.2.1
Version: 5.7.0
Summary: Build taskcluster taskgraphs
Home-page: https://github.com/taskcluster/taskgraph
Classifier: Development Status :: 5 - Production/Stable
@@ -20,6 +20,7 @@ Requires-Dist: mozilla-repo-urls
Requires-Dist: PyYAML (>=5.3.1)
Requires-Dist: redo (>=2.0)
Requires-Dist: requests (>=2.25)
Requires-Dist: requests-unixsocket (>=0.2)
Requires-Dist: slugid (>=2.0)
Requires-Dist: taskcluster-urls (>=11.0)
Requires-Dist: voluptuous (>=0.12.1)

View File

@@ -1,13 +1,13 @@
taskgraph/__init__.py,sha256=aInyG7m4elr01O8pYKyXvuWQSfgXhqHKCBEBTspy1xc,729
taskgraph/__init__.py,sha256=SnTKobCPUED34yMA4oMNOcRw5JOSY61QVx1OiGY9eeg,729
taskgraph/config.py,sha256=XJYKaA9Egn7aiyZ0v70VCq3Kc-XkK08CK2LDsDfsDR8,4822
taskgraph/create.py,sha256=MeWVr5gKJefjwK_3_xZUcDDu2NVH97gbUuu1dw_I9hA,5184
taskgraph/decision.py,sha256=qARBTlLYJ7NVw3aflrspRn_hFmvKcrXJ058yao_4b7A,12882
taskgraph/docker.py,sha256=6tdGVrKFNonznRJSP4IDZEhKnjV-wYKsR0nXnoDOvZk,7924
taskgraph/docker.py,sha256=UtUfv3F7YBmrI7tJ1XODG_VvfwG0oWpNlsv59Bst728,7834
taskgraph/files_changed.py,sha256=W3_gEgUT-mVH9DaaU_8X6gYpftrqBU3kgveGbzPLziU,2793
taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
taskgraph/generator.py,sha256=8d59-CK8LcnaKLa_qJG_R2G1gofiHqCFY7OWRqBkn2o,15667
taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
taskgraph/main.py,sha256=wuEHkMvdQ7ls7bvNpk6sWXGyXLrOofoCV8oNtnSBKsw,26480
taskgraph/main.py,sha256=-BC0J4PhLL-6nvzHgk2YGHfPH8yfjYVbgeKBKiJ25QQ,26201
taskgraph/morph.py,sha256=Q6weAi-xpJM4XoKA2mM6gVXQYLnE1YSws53vTZygMkY,9192
taskgraph/optimize.py,sha256=NVshvkqRKr7SQvRdqz5CELmnIXeiODkDxlK0D9QMi9k,16487
taskgraph/parameters.py,sha256=TYB5P2rIdGn-C8a2fcBoub_HZ4Svk2Rn3TK0MetFvjc,11918
@@ -44,33 +44,31 @@ taskgraph/optimize/__pycache__/strategies.cpython-38.pyc,sha256=fjYsf_6YNa-kIA4v
taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990
taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
taskgraph/run-task/run-task,sha256=Mpr195iq9eOh6B4MBpPzEDlxeNyJq0Fa2yrtlJunlXE,45434
taskgraph/run-task/run-task,sha256=KSIUkIfZUzjfJtiIPtwXBU4B0vA9hZnZslJ-heKRIuU,45128
taskgraph/transforms/__init__.py,sha256=aw1dz2sRWZcbTILl6SVDuqIEw0mDdjSYu3LCVs-RLXE,110
taskgraph/transforms/base.py,sha256=LFw2NwhrSriI3vbcCttArTFb7uHxckQpHeFZmatofvM,5146
taskgraph/transforms/cached_tasks.py,sha256=Z10VD1kEBVXJvj8qSsNTq2mYpklh0V1EN8OT6QK3v_E,2607
taskgraph/transforms/chunking.py,sha256=7z9oXiA2dDguYwJPaZYCi-fEzbc--O9avZAFS3vP_kg,2592
taskgraph/transforms/code_review.py,sha256=eE2xrDtdD_n3HT3caQ2HGAkPm6Uutdm4hDCpCoFjEps,707
taskgraph/transforms/docker_image.py,sha256=AUuWMx43FcQfgbXy4_2Sjae0cWrh5XWMMcJ3ItcoKes,7606
taskgraph/transforms/fetch.py,sha256=ORnxpVidOQtI1q1xeHl1c1jlShXD8R_jTGC2CX3lLM4,10479
taskgraph/transforms/from_deps.py,sha256=1mdjIWYshVI2zBywzB3JEqOyvqgVjFvarcQt9PLDSc4,8950
taskgraph/transforms/from_deps.py,sha256=aMqzvjC9ckK7T8-u4MoA0QyqSIceXfjJp4whExmUWHE,6647
taskgraph/transforms/notify.py,sha256=0sga-Ls9dhWLAsL0FBjXmVbbduee8LAZp_1pHBQR0iI,6019
taskgraph/transforms/release_notifications.py,sha256=jrb9CCT-z_etDf690T-AeCvdzIoVWBAeM_FGoW7FIzA,3305
taskgraph/transforms/task.py,sha256=0oQYH7Upjus0-gzCrYbE0tUKZQUEv6Uq1adGBqiNM60,52254
taskgraph/transforms/task_context.py,sha256=FxZwT69ozierogtlCTNvk7zCW52d0HdhCaJN7EDmI1s,4272
taskgraph/transforms/__pycache__/__init__.cpython-38.pyc,sha256=XHsSgZEVDiQqINzElOjBvjhPjyfaNjAwTtV5Aj6ubDQ,232
taskgraph/transforms/__pycache__/base.cpython-38.pyc,sha256=s5IC570o9P2J12benwBfeAN7RSRWKKxPErkEHNZbq_c,4510
taskgraph/transforms/job/__init__.py,sha256=JbNpqdoJRId24QVGe821r6u7Zvm2fTNvME_PMGunaoU,17706
taskgraph/transforms/job/__init__.py,sha256=FPr9rGFYtcVT0zPk7CwzowAsRmSYlPL3RtFgztjIFMI,17324
taskgraph/transforms/job/common.py,sha256=ldlbRI8sdEd-eUcre4GtXMerUg0RQZ_XSe9GwAkfI3I,5897
taskgraph/transforms/job/index_search.py,sha256=Ngh9FFu1bx2kHVTChW2vcrbnb3SzMneRHopXk18RfB4,1220
taskgraph/transforms/job/run_task.py,sha256=s9gq1bPdzBB0j2OguXJpWn1-S5Ctltqo4aLsB4kzpUc,8385
taskgraph/transforms/job/run_task.py,sha256=5vmSwjWBNniSU2UcbnE_BQGct4bUTULIivYXlFSqB-4,9814
taskgraph/transforms/job/toolchain.py,sha256=GOqIvp1MgtV-6whi2ofgSCFB7GolikZbfLXz0C1h0vc,6015
taskgraph/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
taskgraph/util/archive.py,sha256=nzYn8cQ3NfLAeV-2SuTNoeQ6hg8m40f6FQcSTyVIKwQ,2855
taskgraph/util/attributes.py,sha256=pPOFmwkDQQ-IqfDpVghZ10YI_qXRY4Bi5JP3xr6XVvc,2964
taskgraph/util/cached_tasks.py,sha256=o-yJ91wlWbzoDB2GvKPpGcDE27_IEMgczp_figEBjV8,3406
taskgraph/util/decision.py,sha256=uTC143FpTKQkGff5jIz3voWRYXBCHgx-XAm7FMW53hE,2433
taskgraph/util/dependencies.py,sha256=3Qba3zI87JYR5fk5FndGzEVW-5NIzzZrBf9rVYcnLD0,2734
taskgraph/util/docker.py,sha256=rTbzUt8S6s3N1r8gmwHrqsIY9VZ7TDWBM-jZQ5w0P_U,7762
taskgraph/util/dependencies.py,sha256=U9nncoFvE4aXWeOr_Q-igeKNkzqGvgSTveBZO3OMyI4,2592
taskgraph/util/docker.py,sha256=vdTruZT2Z_GVcyAYilaHt8VaRj4b-dtBKVWlq_GwYvE,11699
taskgraph/util/hash.py,sha256=31sQmDwQOavA5hWsmzWDNFoFTaTp5a7qLSQLNTEALD8,1661
taskgraph/util/keyed_by.py,sha256=cgBH4tG8eH5UUrm5q4ODG7A4fzkGAOI7feVoZy3V8Ho,3419
taskgraph/util/memoize.py,sha256=XDlwc-56gzoY8QTwOoiCOYL-igX7JoMcY-9Ih80Euc8,1331
@@ -82,10 +80,10 @@ taskgraph/util/schema.py,sha256=JGd0Imjfv6JKCY_tjJtOYwI6uwKUaNgzAcvcZj5WE6A,8323
taskgraph/util/shell.py,sha256=MB9zHVSvxgOuszgmKr2rWUDahANZkbHHNkjjagZG_3I,1317
taskgraph/util/taskcluster.py,sha256=cGUGvkrefRHngjyZm_iQRYKRlGi4jMIr7ky0fi_YBrg,12445
taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
taskgraph/util/templates.py,sha256=Dqxfl244u-PX7dnsk3_vYyzDwpDgJtANK6NmZwN3Qow,1417
taskgraph/util/time.py,sha256=pNFcTH-iYRfm2-okm1lMATc4B5wO-_FXbOFXEtXD27g,3390
taskgraph/util/treeherder.py,sha256=A3rpPUQB60Gn1Yx-OZgKuWWGJ8x0-6tcdeeslzco9ag,2687
taskgraph/util/vcs.py,sha256=54Haq2XyC5CmPnjrPRQZY5wUeoFsaV9pWTYvBjPcVMA,18917
taskgraph/util/vcs.py,sha256=wyDcz1oIvxyS7HbLFUP-G8Y1io3mV5dgfYagnDMSJ90,18780
taskgraph/util/verify.py,sha256=cSd7EeP9hUvp-5WOvKDHrvpFAGb_LuiNPxPp0-YmNEA,8947
taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
taskgraph/util/yaml.py,sha256=hfKI_D8Q7dimq4_VvO3WEh8CJsTrsIMwN6set7HIQbY,990
@@ -99,9 +97,9 @@ taskgraph/util/__pycache__/schema.cpython-38.pyc,sha256=0Hew9ATBeA1amGZ_EjL7yULT
taskgraph/util/__pycache__/verify.cpython-38.pyc,sha256=8U5zC7jsLhwX6741yjcuH5quED3PxKItqDuACPc6yW0,7721
taskgraph/util/__pycache__/workertypes.cpython-38.pyc,sha256=hD8JOa_1TnhyQSyNcAZRpltqzgazoa2ukQB5gDGTNB4,2014
taskgraph/util/__pycache__/yaml.cpython-38.pyc,sha256=qOzXDWZxoUTcfeHjrKZKUKoI1y4vFKrMl93s7tqAAF4,1271
taskcluster_taskgraph-6.2.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-6.2.1.dist-info/METADATA,sha256=IPinMIVrCoWs7yeylvxKwQgKOfpuJViBdNPjvNLbAGk,1046
taskcluster_taskgraph-6.2.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
taskcluster_taskgraph-6.2.1.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
taskcluster_taskgraph-6.2.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-6.2.1.dist-info/RECORD,,
taskcluster_taskgraph-5.7.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
taskcluster_taskgraph-5.7.0.dist-info/METADATA,sha256=xH6ezBoxrpvd-kNaM9_4LUX9CqBhpZR08hqxV4G9p4w,1089
taskcluster_taskgraph-5.7.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
taskcluster_taskgraph-5.7.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
taskcluster_taskgraph-5.7.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
taskcluster_taskgraph-5.7.0.dist-info/RECORD,,

View File

@@ -1,5 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.37.1)
Generator: bdist_wheel (0.40.0)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -2,7 +2,7 @@
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
__version__ = "6.2.1"
__version__ = "5.7.0"
# Maximum number of dependencies a single task can have
# https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask

View File

@@ -5,7 +5,6 @@
import json
import os
import subprocess
import tarfile
from io import BytesIO
from textwrap import dedent
@@ -102,9 +101,7 @@ def build_image(name, tag, args=None):
buf = BytesIO()
docker.stream_context_tar(".", image_dir, buf, "", args)
subprocess.run(
["docker", "image", "build", "--no-cache", "-t", tag, "-"], input=buf.getvalue()
)
docker.post_to_docker(buf.getvalue(), "/build", nocache=1, t=tag)
print(f"Successfully built {name} and tagged with {tag}")
@@ -208,9 +205,7 @@ def load_image(url, imageName=None, imageTag=None):
reader.close()
subprocess.run(
["docker", "image", "load"], input=b"".join(download_and_modify_image())
)
docker.post_to_docker(download_and_modify_image(), "/images/load", quiet=0)
# Check that we found a repositories file
if not info.get("image") or not info.get("tag") or not info.get("layer"):

View File

@@ -16,7 +16,6 @@ import traceback
from collections import namedtuple
from concurrent.futures import ProcessPoolExecutor, as_completed
from pathlib import Path
from textwrap import dedent
from typing import Any, List
import appdirs
@@ -800,28 +799,16 @@ def init_taskgraph(options):
# Populate some defaults from the current repository.
context = {"project_name": root.name}
try:
repo_url = repo.get_url(remote=repo.remote_name)
except RuntimeError:
repo_url = ""
repo_url = repo.get_url()
if repo.tool == "git" and "github.com" in repo_url:
context["repo_host"] = "github"
elif repo.tool == "hg" and "hg.mozilla.org" in repo_url:
context["repo_host"] = "hgmo"
else:
print(
dedent(
"""\
Repository not supported!
Taskgraph only supports repositories hosted on Github or hg.mozilla.org.
Ensure you have a remote that points to one of these locations.
"""
),
file=sys.stderr,
raise RuntimeError(
"Repository not supported! Taskgraph currently only "
"supports repositories hosted on Github or hg.mozilla.org."
)
return 1
# Generate the project.
cookiecutter(

View File

@@ -42,11 +42,6 @@ from typing import Optional
SECRET_BASEURL_TPL = "http://taskcluster/secrets/v1/secret/{}"
GITHUB_SSH_FINGERPRINT = (
b"github.com ssh-ed25519 "
b"AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl\n"
b"github.com ecdsa-sha2-nistp256 "
b"AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB"
b"9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=\n"
b"github.com ssh-rsa "
b"AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY"
b"4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDP"

View File

@@ -1,82 +0,0 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
from textwrap import dedent
from voluptuous import ALLOW_EXTRA, Optional, Required
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import Schema
from taskgraph.util.templates import substitute
CHUNK_SCHEMA = Schema(
{
# Optional, so it can be used for a subset of tasks in a kind
Optional(
"chunk",
description=dedent(
"""
`chunk` can be used to split one task into `total-chunks`
tasks, substituting `this_chunk` and `total_chunks` into any
fields in `substitution-fields`.
""".lstrip()
),
): {
Required(
"total-chunks",
description=dedent(
"""
The total number of chunks to split the task into.
""".lstrip()
),
): int,
Optional(
"substitution-fields",
description=dedent(
"""
A list of fields that need to have `{this_chunk}` and/or
`{total_chunks}` replaced in them.
""".lstrip()
),
): [str],
}
},
extra=ALLOW_EXTRA,
)
transforms = TransformSequence()
transforms.add_validate(CHUNK_SCHEMA)
@transforms.add
def chunk_tasks(config, tasks):
for task in tasks:
chunk_config = task.pop("chunk", None)
if not chunk_config:
yield task
continue
total_chunks = chunk_config["total-chunks"]
for this_chunk in range(1, total_chunks + 1):
subtask = copy.deepcopy(task)
subs = {
"this_chunk": this_chunk,
"total_chunks": total_chunks,
}
subtask.setdefault("attributes", {})
subtask["attributes"].update(subs)
for field in chunk_config["substitution-fields"]:
container, subfield = subtask, field
while "." in subfield:
f, subfield = subfield.split(".", 1)
container = container[f]
subcontainer = copy.deepcopy(container[subfield])
subfield = substitute(subfield, **subs)
container[subfield] = substitute(subcontainer, **subs)
yield subtask
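
This removes the `chunking` transform outright (it shipped with taskgraph 6.x). For reference, the kind-level configuration it consumed looked roughly like this, per the schema above (values illustrative):

    chunk:
        total-chunks: 4
        substitution-fields:
            - description
    # expands one task into four, substituting {this_chunk} and
    # {total_chunks} into `description` and recording both as attributes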

View File

@@ -16,10 +16,9 @@ from textwrap import dedent
from voluptuous import Any, Extra, Optional, Required
from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.job import fetches_schema
from taskgraph.util.attributes import attrmatch
from taskgraph.util.dependencies import GROUP_BY_MAP, get_dependencies
from taskgraph.util.schema import Schema, validate_schema
from taskgraph.util.dependencies import GROUP_BY_MAP
from taskgraph.util.schema import Schema
FROM_DEPS_SCHEMA = Schema(
{
@@ -37,16 +36,6 @@ FROM_DEPS_SCHEMA = Schema(
""".lstrip()
),
): list,
Optional(
"set-name",
description=dedent(
"""
When True, `from_deps` will derive a name for the generated
tasks from the name of the primary dependency. Defaults to
True.
""".lstrip()
),
): bool,
Optional(
"with-attributes",
description=dedent(
@@ -91,17 +80,6 @@ FROM_DEPS_SCHEMA = Schema(
""".lstrip()
),
): bool,
Optional(
"fetches",
description=dedent(
"""
If present, a `fetches` entry will be added for each task
dependency. Attributes of the upstream task may be used as
substitution values in the `artifact` or `dest` values of the
`fetches` entry.
""".lstrip()
),
): {str: [fetches_schema]},
},
Extra: object,
},
@@ -161,19 +139,15 @@ def from_deps(config, tasks):
group_by, arg = group_by.popitem()
func = GROUP_BY_MAP[group_by]
if func.schema:
validate_schema(
func.schema, arg, f"Invalid group-by {group_by} argument"
)
func.schema(arg)
groups = func(config, deps, arg)
else:
func = GROUP_BY_MAP[group_by]
groups = func(config, deps)
# Split the task, one per group.
set_name = from_deps.get("set-name", True)
copy_attributes = from_deps.get("copy-attributes", False)
unique_kinds = from_deps.get("unique-kinds", True)
fetches = from_deps.get("fetches", [])
for group in groups:
# Verify there is only one task per kind in each group.
group_kinds = {t.kind for t in group}
@@ -183,10 +157,9 @@ def from_deps(config, tasks):
)
new_task = deepcopy(task)
new_task.setdefault("dependencies", {})
new_task["dependencies"].update(
{dep.kind if unique_kinds else dep.label: dep.label for dep in group}
)
new_task["dependencies"] = {
dep.kind if unique_kinds else dep.label: dep.label for dep in group
}
# Set name and copy attributes from the primary kind.
for kind in kinds:
@@ -202,41 +175,14 @@ def from_deps(config, tasks):
primary_dep = [dep for dep in group if dep.kind == primary_kind][0]
if set_name:
if primary_dep.label.startswith(primary_kind):
new_task["name"] = primary_dep.label[len(primary_kind) + 1 :]
else:
new_task["name"] = primary_dep.label
if primary_dep.label.startswith(primary_kind):
new_task["name"] = primary_dep.label[len(primary_kind) + 1 :]
else:
new_task["name"] = primary_dep.label
if copy_attributes:
attrs = new_task.setdefault("attributes", {})
attrs = new_task.get("attributes", {})
new_task["attributes"] = primary_dep.attributes.copy()
new_task["attributes"].update(attrs)
if fetches:
task_fetches = new_task.setdefault("fetches", {})
for dep_task in get_dependencies(config, new_task):
# Nothing to do if this kind has no fetches listed
if dep_task.kind not in fetches:
continue
fetches_from_dep = []
for kind, kind_fetches in fetches.items():
if kind != dep_task.kind:
continue
for fetch in kind_fetches:
entry = fetch.copy()
entry["artifact"] = entry["artifact"].format(
**dep_task.attributes
)
if "dest" in entry:
entry["dest"] = entry["dest"].format(
**dep_task.attributes
)
fetches_from_dep.append(entry)
task_fetches[dep_task.label] = fetches_from_dep
yield new_task
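
After this hunk, `from-deps` is back to its smaller 5.7.0 surface: `group-by`, `with-attributes`, `copy-attributes`, and `unique-kinds`, with `set-name` and `fetches` gone. A sketch of a kind entry against the restored schema (values illustrative, not taken from any kind in this commit):

    some-signing-task:
        from-deps:
            with-attributes:
                shippable: [true]
            copy-attributes: true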

View File

@@ -27,16 +27,6 @@ from taskgraph.util.workertypes import worker_type_implementation
logger = logging.getLogger(__name__)
# Fetches may be accepted in other transforms and eventually passed along
# to a `job` (eg: from_deps). Defining this here allows them to re-use
# the schema and avoid duplication.
fetches_schema = {
Required("artifact"): str,
Optional("dest"): str,
Optional("extract"): bool,
Optional("verify-hash"): bool,
}
# Schema for a build description
job_description_schema = Schema(
{
@@ -86,7 +76,12 @@ job_description_schema = Schema(
Any("toolchain", "fetch"): [str],
str: [
str,
fetches_schema,
{
Required("artifact"): str,
Optional("dest"): str,
Optional("extract"): bool,
Optional("verify-hash"): bool,
},
],
},
# A description of how to run this job.
@@ -246,10 +241,9 @@ def use_fetches(config, jobs):
worker = job.setdefault("worker", {})
env = worker.setdefault("env", {})
prefix = get_artifact_prefix(job)
for kind in sorted(fetches):
artifacts = fetches[kind]
for kind, artifacts in fetches.items():
if kind in ("fetch", "toolchain"):
for fetch_name in sorted(artifacts):
for fetch_name in artifacts:
label = f"{kind}-{fetch_name}"
label = aliases.get(label, label)
if label not in artifact_names:
@@ -301,13 +295,7 @@ def use_fetches(config, jobs):
prefix = get_artifact_prefix(dep_tasks[0])
def cmp_artifacts(a):
if isinstance(a, str):
return a
else:
return a["artifact"]
for artifact in sorted(artifacts, key=cmp_artifacts):
for artifact in artifacts:
if isinstance(artifact, str):
path = artifact
dest = None

View File

@@ -8,13 +8,14 @@ Support for running jobs that are invoked via the `run-task` script.
import dataclasses
import os
from voluptuous import Any, Optional, Required
from voluptuous import Any, Extra, Optional, Required
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import support_vcs_checkout
from taskgraph.transforms.task import taskref_or_string
from taskgraph.util import path, taskcluster
from taskgraph.util.schema import Schema
from taskgraph.util.yaml import load_yaml
EXEC_COMMANDS = {
"bash": ["bash", "-cx"],
@@ -45,6 +46,16 @@ run_task_schema = Schema(
# it will be included in a single argument to the command specified by
# `exec-with`.
Required("command"): Any([taskref_or_string], taskref_or_string),
# Context to substitute into the command using format string
# substitution (e.g {value}). This is useful if certain aspects of the
# command need to be generated in transforms.
Optional("command-context"): {
# If present, loads a set of context variables from an unnested yaml
# file. If a value is present in both the provided file and directly
# in command-context, the latter will take priority.
Optional("from-file"): str,
Extra: object,
},
# What to execute the command with in the event command is a string.
Optional("exec-with"): Any(*list(EXEC_COMMANDS)),
# Command used to invoke the `run-task` script. Can be used if the script
@@ -126,6 +137,25 @@ def script_url(config, script):
return f"{tc_url}/api/queue/v1/task/{task_id}/artifacts/public/{script}"
def substitute_command_context(command_context, command):
from_file = command_context.pop("from-file", None)
full_context = {}
if from_file:
full_context = load_yaml(from_file)
else:
full_context = {}
full_context.update(command_context)
if isinstance(command, list):
for i in range(len(command)):
command[i] = command[i].format(**full_context)
else:
command = command.format(**full_context)
return command
@run_job_using(
"docker-worker", "run-task", schema=run_task_schema, defaults=worker_defaults
)
@@ -147,6 +177,13 @@ def docker_worker_run_task(config, job, taskdesc):
run_command = run["command"]
if run.get("command-context"):
run_command = substitute_command_context(
run.get("command-context"), run["command"]
)
else:
run_command = run["command"]
# dict is for the case of `{'task-reference': str}`.
if isinstance(run_command, str) or isinstance(run_command, dict):
exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
@@ -213,6 +250,11 @@ def generic_worker_run_task(config, job, taskdesc):
exec_cmd = EXEC_COMMANDS[run.pop("exec-with", "bash")]
run_command = exec_cmd + [run_command]
if run.get("command-context"):
run_command = substitute_command_context(
run.get("command-context"), run_command
)
if run["run-as-root"]:
command.extend(("--user", "root", "--group", "root"))
command.append("--")

View File

@@ -1,121 +0,0 @@
from textwrap import dedent
from voluptuous import ALLOW_EXTRA, Any, Optional, Required
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import Schema
from taskgraph.util.templates import deep_get, substitute
from taskgraph.util.yaml import load_yaml
SCHEMA = Schema(
{
Required(
"task-context",
description=dedent(
"""
`task-context` can be used to substitute values into any field in a
task with data that is not known until `taskgraph` runs.
This data can be provided via `from-parameters` or `from-file`,
which can pull in values from parameters and a defined yml file
respectively.
Data may also be provided directly in the `from-object` section of
`task-context`. This can be useful in `kinds` that define most of
their contents in `task-defaults`, but have some values that may
differ for various concrete `tasks` in the `kind`.
If the same key is found in multiple places the order of precedence
is as follows:
- Parameters
- `from-object` keys
- File
That is to say: parameters will always override anything else.
""".lstrip(),
),
): {
Optional(
"from-parameters",
description=dedent(
"""
Retrieve task context values from parameters. A single
parameter may be provided or a list of parameters in
priority order. The latter can be useful in implementing a
"default" value if some other parameter is not provided.
""".lstrip()
),
): {str: Any([str], str)},
Optional(
"from-file",
description=dedent(
"""
Retrieve task context values from a yaml file. The provided
file should usually only contain top level keys and values
(eg: nested objects will not be interpolated - they will be
substituted as text representations of the object).
""".lstrip()
),
): str,
Optional(
"from-object",
description="Key/value pairs to be used as task context",
): object,
Required(
"substitution-fields",
description=dedent(
"""
A list of fields in the task to substitute the provided values
into.
""".lstrip()
),
): [str],
},
},
extra=ALLOW_EXTRA,
)
transforms = TransformSequence()
transforms.add_validate(SCHEMA)
@transforms.add
def render_task(config, jobs):
for job in jobs:
sub_config = job.pop("task-context")
params_context = {}
for var, path in sub_config.pop("from-parameters", {}).items():
if isinstance(path, str):
params_context[var] = deep_get(config.params, path)
else:
for choice in path:
value = deep_get(config.params, choice)
if value is not None:
params_context[var] = value
break
file_context = {}
from_file = sub_config.pop("from-file", None)
if from_file:
file_context = load_yaml(from_file)
fields = sub_config.pop("substitution-fields")
subs = {}
subs.update(file_context)
# We've popped away the configuration; everything left in `sub_config` is
# substitution key/value pairs.
subs.update(sub_config.pop("from-object", {}))
subs.update(params_context)
# Now that we have our combined context, we can substitute.
for field in fields:
container, subfield = job, field
while "." in subfield:
f, subfield = subfield.split(".", 1)
container = container[f]
container[subfield] = substitute(container[subfield], **subs)
yield job
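
The deleted `task_context` transform is the generalization this backout retreats from: it could pull substitution values from parameters, a file, or inline `from-object` data (parameters taking precedence, then `from-object`, then the file) and substitute them into arbitrary `substitution-fields`. The kind-level configuration it consumed, per the schema above (values illustrative):

    task-context:
        from-parameters:
            head_rev: head_rev
        from-object:
            greeting: hello
        substitution-fields:
            - description
            - run.command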

View File

@@ -14,9 +14,6 @@ GROUP_BY_MAP = {}
def group_by(name, schema=None):
def wrapper(func):
assert (
name not in GROUP_BY_MAP
), f"duplicate group_by function name {name} ({func} and {GROUP_BY_MAP[name]})"
GROUP_BY_MAP[name] = func
func.schema = schema
return func
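
This drops the duplicate-registration assertion that 6.x added to `group_by`; together with the `single_grouping` hunk earlier, the default grouping also returns to the name `single` (backed out: `single-with-filters`). Registering a grouping function works the same way in both versions; a minimal hypothetical example:

    @group_by("all")
    def group_all(config, tasks):
        # a single group containing every eligible task
        yield list(tasks)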

View File

@@ -5,8 +5,13 @@
import hashlib
import io
import json
import os
import re
import sys
import urllib.parse
import requests_unixsocket
from taskgraph.util.archive import create_tar_gz_from_files
from taskgraph.util.memoize import memoize
@@ -16,6 +21,106 @@ IMAGE_DIR = os.path.join(".", "taskcluster", "docker")
from .yaml import load_yaml
def docker_url(path, **kwargs):
docker_socket = os.environ.get("DOCKER_SOCKET", "/var/run/docker.sock")
return urllib.parse.urlunparse(
(
"http+unix",
urllib.parse.quote(docker_socket, safe=""),
path,
"",
urllib.parse.urlencode(kwargs),
"",
)
)
def post_to_docker(tar, api_path, **kwargs):
"""POSTs a tar file to a given docker API path.
The tar argument can be anything that can be passed to requests.post()
as data (e.g. iterator or file object).
The extra keyword arguments are passed as arguments to the docker API.
"""
req = requests_unixsocket.Session().post(
docker_url(api_path, **kwargs),
data=tar,
stream=True,
headers={"Content-Type": "application/x-tar"},
)
if req.status_code != 200:
message = req.json().get("message")
if not message:
message = f"docker API returned HTTP code {req.status_code}"
raise Exception(message)
status_line = {}
buf = b""
for content in req.iter_content(chunk_size=None):
if not content:
continue
# Sometimes, a chunk of content is not a complete json, so we cumulate
# with leftovers from previous iterations.
buf += content
try:
data = json.loads(buf)
except Exception:
continue
buf = b""
# data is sometimes an empty dict.
if not data:
continue
# Mimic how docker itself presents the output. This code was tested
# with API version 1.18 and 1.26.
if "status" in data:
if "id" in data:
if sys.stderr.isatty():
total_lines = len(status_line)
line = status_line.setdefault(data["id"], total_lines)
n = total_lines - line
if n > 0:
# Move the cursor up n lines.
sys.stderr.write(f"\033[{n}A")
# Clear line and move the cursor to the beginning of it.
sys.stderr.write("\033[2K\r")
sys.stderr.write(
"{}: {} {}\n".format(
data["id"], data["status"], data.get("progress", "")
)
)
if n > 1:
# Move the cursor down n - 1 lines, which, considering
# the carriage return on the last write, gets us back
# where we started.
sys.stderr.write(f"\033[{n - 1}B")
else:
status = status_line.get(data["id"])
# Only print status changes.
if status != data["status"]:
sys.stderr.write("{}: {}\n".format(data["id"], data["status"]))
status_line[data["id"]] = data["status"]
else:
status_line = {}
sys.stderr.write("{}\n".format(data["status"]))
elif "stream" in data:
sys.stderr.write(data["stream"])
elif "aux" in data:
sys.stderr.write(repr(data["aux"]))
elif "error" in data:
sys.stderr.write("{}\n".format(data["error"]))
# Sadly, docker doesn't give more than a plain string for errors,
# so the best we can do to propagate the error code from the command
# that failed is to parse the error message...
errcode = 1
m = re.search(r"returned a non-zero code: (\d+)", data["error"])
if m:
errcode = int(m.group(1))
sys.exit(errcode)
else:
raise NotImplementedError(repr(data))
sys.stderr.flush()
def docker_image(name, by_tag=False):
"""
Resolve in-tree prebuilt docker image to ``<registry>/<repository>@sha256:<digest>``,
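
`post_to_docker` streams a tar to the Docker daemon over its unix socket (via `requests_unixsocket`) and replays the daemon's progress output. Its restored callers appear in the `docker.py` hunks earlier in this commit; usage as in `build_image` there:

    buf = BytesIO()
    docker.stream_context_tar(".", image_dir, buf, "", args)
    docker.post_to_docker(buf.getvalue(), "/build", nocache=1, t=tag)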

View File

@@ -48,33 +48,3 @@ def merge(*objects):
if len(objects) == 1:
return copy.deepcopy(objects[0])
return merge_to(objects[-1], merge(*objects[:-1]))
def deep_get(dict_, field):
container, subfield = dict_, field
while "." in subfield:
f, subfield = subfield.split(".", 1)
if f not in container:
return None
container = container[f]
return container.get(subfield)
def substitute(item, **subs):
if isinstance(item, list):
for i in range(len(item)):
item[i] = substitute(item[i], **subs)
elif isinstance(item, dict):
new_dict = {}
for k, v in item.items():
k = k.format(**subs)
new_dict[k] = substitute(v, **subs)
item = new_dict
elif isinstance(item, str):
item = item.format(**subs)
else:
item = item
return item
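
These deleted `templates.py` helpers served the removed `task_context` and `chunking` transforms: `substitute` recursively applies `str.format` through lists, dict keys and values, and strings, while `deep_get` resolves dotted paths. For example:

    substitute({"run": {"command": "echo {msg}"}}, msg="hi")
    # -> {"run": {"command": "echo hi"}}
    deep_get({"run": {"command": "echo hi"}}, "run.command")
    # -> "echo hi"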

View File

@@ -353,19 +353,13 @@ class GitRepository(Repository):
def remote_name(self):
try:
remote_branch_name = self.run(
"rev-parse",
"--verify",
"--abbrev-ref",
"--symbolic-full-name",
"@{u}",
stderr=subprocess.PIPE,
"rev-parse", "--verify", "--abbrev-ref", "--symbolic-full-name", "@{u}"
).strip()
return remote_branch_name.split("/")[0]
except subprocess.CalledProcessError as e:
# Error code 128 comes with the message:
# "fatal: no upstream configured for branch $BRANCH"
if e.returncode != 128:
print(e.stderr)
raise
return self._get_most_suitable_remote("`git remote add origin $URL`")