Mirror of https://github.com/mozilla/gecko-dev.git (synced 2024-12-03 10:33:33 +00:00)
Bug 1896126 - Update taskcluster-taskgraph vendor to 8.2.0, r=taskgraph-reviewers,bhearsum
Differential Revision: https://phabricator.services.mozilla.com/D211235
This commit is contained in:
parent 6c9a98594a
commit c4fc5f4245
third_party/python/poetry.lock (generated, vendored): 9 lines changed
@@ -1161,7 +1161,6 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
     {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
     {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
     {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -1376,14 +1375,14 @@ test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "

 [[package]]
 name = "taskcluster-taskgraph"
-version = "8.0.1"
+version = "8.2.0"
 description = "Build taskcluster taskgraphs"
 category = "main"
 optional = false
 python-versions = "*"
 files = [
-    {file = "taskcluster-taskgraph-8.0.1.tar.gz", hash = "sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797"},
-    {file = "taskcluster_taskgraph-8.0.1-py3-none-any.whl", hash = "sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c"},
+    {file = "taskcluster-taskgraph-8.2.0.tar.gz", hash = "sha256:af146323402c2d5f67c65e3c232eda953da1ce319e465069e4d5c7aeb212b66e"},
+    {file = "taskcluster_taskgraph-8.2.0-py3-none-any.whl", hash = "sha256:410e9c9ef43eac1d0676f16867137de90f77eb0b4e0cbe746fe5512d1a626822"},
 ]

 [package.dependencies]
@@ -1625,4 +1624,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.8"
-content-hash = "a683c725217ca4a2d61b56b43b137217b5f8630ee0715e44637fd29599e0b0a2"
+content-hash = "401a5bde4f28e456dc5369de933b4382bfc9cbf12da2a9ccd7dbfa1a592f34b9"

third_party/python/requirements.in (vendored): 2 lines changed
@@ -53,7 +53,7 @@ setuptools==68.0.0
 six==1.16.0
 slugid==2.0.0
 taskcluster==44.2.2
-taskcluster-taskgraph==8.0.1
+taskcluster-taskgraph==8.2.0
 taskcluster-urls==13.0.1
 toml==0.10.2
 tomlkit==0.12.3

third_party/python/requirements.txt (vendored): 7 lines changed
@@ -492,7 +492,6 @@ pyyaml==6.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
     --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
     --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
-    --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
     --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
     --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
     --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
@@ -540,9 +539,9 @@ six==1.16.0 ; python_version >= "3.8" and python_version < "4.0" \
 slugid==2.0.0 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297 \
     --hash=sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c
-taskcluster-taskgraph==8.0.1 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:14500bc703f64eb002c0cd505caaf2d34ffc0ae66d109b108e738661da1ae09c \
-    --hash=sha256:21387537bbebab2a7b1890d03e20e49379bdda65efd45ca7fb8d01f5c29e1797
+taskcluster-taskgraph==8.2.0 ; python_version >= "3.8" and python_version < "4.0" \
+    --hash=sha256:410e9c9ef43eac1d0676f16867137de90f77eb0b4e0cbe746fe5512d1a626822 \
+    --hash=sha256:af146323402c2d5f67c65e3c232eda953da1ce319e465069e4d5c7aeb212b66e
 taskcluster-urls==13.0.1 ; python_version >= "3.8" and python_version < "4.0" \
     --hash=sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973 \
     --hash=sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367 \

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: taskcluster-taskgraph
-Version: 8.0.1
+Version: 8.2.0
 Summary: Build taskcluster taskgraphs
 Home-page: https://github.com/taskcluster/taskgraph
 Classifier: Development Status :: 5 - Production/Stable

@@ -1,12 +1,12 @@
-taskgraph/__init__.py,sha256=hCl3NLzC-cVXlKhuzf0-_0wd0gYmNA3oshXfTaa9DNQ,729
+taskgraph/__init__.py,sha256=Y7AMSO_xkN6zeyK0gagjJ7_kp0ra84C6-RPuLB6FH_A,729
 taskgraph/config.py,sha256=8vntWUrPwGds22mFKYAgcsD4Mr8hoONTv2ssGBcClLw,5108
 taskgraph/create.py,sha256=_zokjSM3ZaO04l2LiMhenE8qXDZVfYvueIIu5hGUhzc,5185
-taskgraph/decision.py,sha256=sG0CIj9OSOdfN65LSt6dRYFWbns9_JraVC5fQU1_7oc,13012
+taskgraph/decision.py,sha256=gIvVLfMTd6KtnrTFkmFTrky93mknB9dxtL7_aZwEtoA,13088
-taskgraph/docker.py,sha256=rk-tAMycHnapFyR2Q-XJXzC2A4uv0i-VykLZfwl-pRo,8417
+taskgraph/docker.py,sha256=Tw2L4A3Mb3P4BdSkVilhSf8Ob38j15xIYYxtUXSDT9s,8415
 taskgraph/filter_tasks.py,sha256=R7tYXiaVPGIkQ6O1c9-QJrKZ59m9pFXCloUlPraVnZU,866
 taskgraph/generator.py,sha256=zrH1zfy-8akksKTSOf6e4FEsdOd5y7-h1Jne_2Jabcc,15703
 taskgraph/graph.py,sha256=bHUsv2pPa2SSaWgBY-ItIj7REPd0o4fFYrwoQbwFKTY,4680
-taskgraph/main.py,sha256=tgfAEcNUJfmADteL24yJR5u7tzU4v3mzmxiogVSCK8Y,29072
+taskgraph/main.py,sha256=n4p2LAN10Oo2yVv1G-cnWxK0FV2KcB9Q4H5m0K0qmw0,29171
 taskgraph/morph.py,sha256=bwkaSGdTZLcK_rhF2st2mCGv9EHN5WdbnDeuZcqp9UA,9208
 taskgraph/parameters.py,sha256=hrwUHHu4PS79w-fQ3qNnLSyjRto1EDlidE8e1GzIy8U,12272
 taskgraph/target_tasks.py,sha256=9_v66bzmQFELPsfIDGITXrqzsmEiLq1EeuJFhycKL0M,3356
@@ -24,8 +24,8 @@ taskgraph/loader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 taskgraph/loader/default.py,sha256=_bBJG6l04v44Jm5HSIEnVndC05NpNmq5L28QfJHk0wo,1185
 taskgraph/loader/transform.py,sha256=olUBPjxk3eEIg25sduxlcyqhjoig4ts5kPlT_zs6g9g,2147
 taskgraph/optimize/__init__.py,sha256=Oqpq1RW8QzOcu7zaMlNQ3BHT9ws9e_93FWfCqzNcQps,123
-taskgraph/optimize/base.py,sha256=wTViUwVmY9sZvlzSuGwkVrETCo0v2OfyNxFFgzJrDNc,18982
+taskgraph/optimize/base.py,sha256=ckr0C2qzYTyp036oDInMDRaGmieAH7t93kOy-1hXPbg,20107
-taskgraph/optimize/strategies.py,sha256=UryFI5TizzEF_2NO8MyuKwqVektHfJeG_t0_zZwxEds,2577
+taskgraph/optimize/strategies.py,sha256=KTX9PJ846Z8Bpy7z2_JGlJtyIbpt8Di8qrM9oGcGElA,3241
 taskgraph/run-task/fetch-content,sha256=G1aAvZlTg0yWHqxhSxi4RvfxW-KBJ5JwnGtWRqfH_bg,29990
 taskgraph/run-task/hgrc,sha256=BybWLDR89bWi3pE5T05UqmDHs02CbLypE-omLZWU6Uk,896
 taskgraph/run-task/robustcheckout.py,sha256=vPKvHb3fIIJli9ZVZG88XYoa8Sohy2JrpmH6pDgBDHI,30813
@@ -62,7 +62,7 @@ taskgraph/util/readonlydict.py,sha256=XzTG-gqGqWVlSkDxSyOL6Ur7Z0ONhIJ9DVLWV3q4q1
 taskgraph/util/schema.py,sha256=HmbbJ_i5uxZZHZSJ8sVWaD-VMhZI4ymx0STNcjO5t2M,8260
 taskgraph/util/set_name.py,sha256=cha9awo2nMQ9jfSEcbyNkZkCq_1Yg_kKJTfvDzabHSc,1134
 taskgraph/util/shell.py,sha256=nf__ly0Ikhj92AiEBCQtvyyckm8UfO_3DSgz0SU-7QA,1321
-taskgraph/util/taskcluster.py,sha256=LScpZknMycOOneIcRMf236rCTMRHHGxFTc9Lh7mRKaI,13057
+taskgraph/util/taskcluster.py,sha256=-BlQqkxxH5S2BbZ4X2c0lNd1msU2xLM1S5rr8qrLwkE,15961
 taskgraph/util/taskgraph.py,sha256=ecKEvTfmLVvEKLPO_0g34CqVvc0iCzuNMh3064BZNrE,1969
 taskgraph/util/templates.py,sha256=HGTaIKCpAwEzBDHq0cDai1HJjPJrdnHsjJz6N4LVpKI,2139
 taskgraph/util/time.py,sha256=XauJ0DbU0fyFvHLzJLG4ehHv9KaKixxETro89GPC1yk,3350
@@ -71,9 +71,9 @@ taskgraph/util/vcs.py,sha256=FjS82fiTsoQ_ArjTCDOtDGfNdVUp_8zvVKB9SoAG3Rs,18019
 taskgraph/util/verify.py,sha256=htrNX7aXMMDzxymsFVcs0kaO5gErFHd62g9cQsZI_WE,8518
 taskgraph/util/workertypes.py,sha256=1wgM6vLrlgtyv8854anVIs0Bx11kV8JJJaKcOHJc2j0,2498
 taskgraph/util/yaml.py,sha256=-LaIf3RROuaSWckOOGN5Iviu-DHWxIChgHn9a7n6ec4,1059
-taskcluster_taskgraph-8.0.1.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
+taskcluster_taskgraph-8.2.0.dist-info/LICENSE,sha256=HyVuytGSiAUQ6ErWBHTqt1iSGHhLmlC8fO7jTCuR8dU,16725
-taskcluster_taskgraph-8.0.1.dist-info/METADATA,sha256=qg-m62f4BGLh2jBAr_-OQZhraOSciTrv5EyNY0Wwq8I,4688
+taskcluster_taskgraph-8.2.0.dist-info/METADATA,sha256=minv1wMCm1M-KJtSo85Cj_tUPkQEdc3OuqHt-HT4tjE,4688
-taskcluster_taskgraph-8.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+taskcluster_taskgraph-8.2.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-taskcluster_taskgraph-8.0.1.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
+taskcluster_taskgraph-8.2.0.dist-info/entry_points.txt,sha256=2hxDzE3qq_sHh-J3ROqwpxgQgxO-196phWAQREl2-XA,50
-taskcluster_taskgraph-8.0.1.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
+taskcluster_taskgraph-8.2.0.dist-info/top_level.txt,sha256=3JNeYn_hNiNXC7DrdH_vcv-WYSE7QdgGjdvUYvSjVp0,10
-taskcluster_taskgraph-8.0.1.dist-info/RECORD,,
+taskcluster_taskgraph-8.2.0.dist-info/RECORD,,

@@ -2,7 +2,7 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

-__version__ = "8.0.1"
+__version__ = "8.2.0"

 # Maximum number of dependencies a single task can have
 # https://docs.taskcluster.net/reference/platform/taskcluster-queue/references/api#createTask

@@ -74,6 +74,8 @@ def taskgraph_decision(options, parameters=None):
     * generating a set of artifacts to memorialize the graph
     * calling TaskCluster APIs to create the graph
     """
+    if options.get("verbose"):
+        logging.root.setLevel(logging.DEBUG)

     parameters = parameters or (
         lambda graph_config: get_decision_parameters(graph_config, options)

@@ -16,7 +16,10 @@ except ImportError as e:
     zstd = e

 from taskgraph.util import docker
-from taskgraph.util.taskcluster import get_artifact_url, get_session
+from taskgraph.util.taskcluster import (
+    get_artifact_url,
+    get_session,
+)

 DEPLOY_WARNING = """
 *****************************************************************
@@ -59,10 +62,9 @@ def load_image_by_name(image_name, tag=None):
     )
     tasks = load_tasks_for_kind(params, "docker-image")
     task = tasks[f"build-docker-image-{image_name}"]
-    deadline = None
-    task_id = IndexSearch().should_replace_task(
-        task, {}, deadline, task.optimization.get("index-search", [])
-    )
+    indexes = task.optimization.get("index-search", [])
+    task_id = IndexSearch().should_replace_task(task, {}, None, indexes)

     if task_id in (True, False):
         print(

@@ -697,6 +697,9 @@ def image_digest(args):
     "--tasks-for", required=True, help="the tasks_for value used to generate this task"
 )
 @argument("--try-task-config-file", help="path to try task configuration file")
+@argument(
+    "--verbose", "-v", action="store_true", help="include debug-level logging output"
+)
 def decision(options):
     from taskgraph.decision import taskgraph_decision

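Together with the taskgraph_decision() hunk above, the new --verbose/-v argument simply raises the root logger to DEBUG before the decision graph is generated. A minimal sketch of that wiring using plain argparse; taskgraph's own @argument decorator and command registration are not reproduced here, so the parser below is illustrative only:

import argparse
import logging

parser = argparse.ArgumentParser()
parser.add_argument(
    "--verbose", "-v", action="store_true", help="include debug-level logging output"
)
options = vars(parser.parse_args(["-v"]))

# Same check that taskgraph_decision() now performs before generating the graph.
if options.get("verbose"):
    logging.root.setLevel(logging.DEBUG)
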
@@ -22,6 +22,7 @@ from taskgraph.graph import Graph
 from taskgraph.taskgraph import TaskGraph
 from taskgraph.util.parameterization import resolve_task_references, resolve_timestamps
 from taskgraph.util.python_path import import_sibling_modules
+from taskgraph.util.taskcluster import find_task_id_batched, status_task_batched

 logger = logging.getLogger(__name__)
 registry = {}
@@ -51,6 +52,9 @@ def optimize_task_graph(
     Perform task optimization, returning a taskgraph and a map from label to
     assigned taskId, including replacement tasks.
     """
+    # avoid circular import
+    from taskgraph.optimize.strategies import IndexSearch
+
     label_to_taskid = {}
     if not existing_tasks:
         existing_tasks = {}
@@ -70,6 +74,23 @@ def optimize_task_graph(
         do_not_optimize=do_not_optimize,
     )

+    # Gather each relevant task's index
+    indexes = set()
+    for label in target_task_graph.graph.visit_postorder():
+        if label in do_not_optimize:
+            continue
+        _, strategy, arg = optimizations(label)
+        if isinstance(strategy, IndexSearch) and arg is not None:
+            indexes.update(arg)
+
+    index_to_taskid = {}
+    taskid_to_status = {}
+    if indexes:
+        # Find their respective status using TC index/queue batch APIs
+        indexes = list(indexes)
+        index_to_taskid = find_task_id_batched(indexes)
+        taskid_to_status = status_task_batched(list(index_to_taskid.values()))
+
     replaced_tasks = replace_tasks(
         target_task_graph=target_task_graph,
         optimizations=optimizations,
@@ -78,6 +99,8 @@ def optimize_task_graph(
         label_to_taskid=label_to_taskid,
         existing_tasks=existing_tasks,
         removed_tasks=removed_tasks,
+        index_to_taskid=index_to_taskid,
+        taskid_to_status=taskid_to_status,
     )

     return (
@@ -259,12 +282,17 @@ def replace_tasks(
     label_to_taskid,
     removed_tasks,
     existing_tasks,
+    index_to_taskid,
+    taskid_to_status,
 ):
     """
     Implement the "Replacing Tasks" phase, returning a set of task labels of
     all replaced tasks. The replacement taskIds are added to label_to_taskid as
     a side-effect.
     """
+    # avoid circular import
+    from taskgraph.optimize.strategies import IndexSearch
+
     opt_counts = defaultdict(int)
     replaced = set()
     dependents_of = target_task_graph.graph.reverse_links_dict()
@@ -307,6 +335,10 @@ def replace_tasks(
             deadline = max(
                 resolve_timestamps(now, task.task["deadline"]) for task in dependents
             )
+
+        if isinstance(opt, IndexSearch):
+            arg = arg, index_to_taskid, taskid_to_status
+
         repl = opt.should_replace_task(task, params, deadline, arg)
         if repl:
             if repl is True:
@@ -316,7 +348,7 @@ def replace_tasks(
                 removed_tasks.add(label)
             else:
                 logger.debug(
-                    f"replace_tasks: {label} replaced by optimization strategy"
+                    f"replace_tasks: {label} replaced with {repl} by optimization strategy"
                 )
                 label_to_taskid[label] = repl
                 replaced.add(label)

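The net effect of the optimize_task_graph()/replace_tasks() changes above is that index lookups are hoisted out of the per-task loop: every IndexSearch strategy's index paths are collected once, resolved with two batched Taskcluster calls, and the resulting maps are handed down to replace_tasks(). A rough sketch of that flow under simplified assumptions; here optimizations_by_label is a hypothetical plain dict of label -> (strategy, arg), whereas the real code walks the task graph in post-order and unpacks a three-element optimizations() result:

from taskgraph.optimize.strategies import IndexSearch
from taskgraph.util.taskcluster import find_task_id_batched, status_task_batched


def prefetch_index_lookups(optimizations_by_label, do_not_optimize):
    """Collect every index-search path once, then resolve them in bulk."""
    indexes = set()
    for label, (strategy, arg) in optimizations_by_label.items():
        if label in do_not_optimize:
            continue
        if isinstance(strategy, IndexSearch) and arg is not None:
            indexes.update(arg)

    index_to_taskid = {}
    taskid_to_status = {}
    if indexes:
        # Two batched API calls instead of two calls per index path.
        index_to_taskid = find_task_id_batched(list(indexes))
        taskid_to_status = status_task_batched(list(index_to_taskid.values()))
    return index_to_taskid, taskid_to_status

replace_tasks() then passes each IndexSearch strategy the tuple (index_paths, index_to_taskid, taskid_to_status) instead of letting the strategy query the index and queue services itself.
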
|
@ -22,12 +22,30 @@ class IndexSearch(OptimizationStrategy):
|
|||||||
|
|
||||||
fmt = "%Y-%m-%dT%H:%M:%S.%fZ"
|
fmt = "%Y-%m-%dT%H:%M:%S.%fZ"
|
||||||
|
|
||||||
def should_replace_task(self, task, params, deadline, index_paths):
|
def should_replace_task(self, task, params, deadline, arg):
|
||||||
"Look for a task with one of the given index paths"
|
"Look for a task with one of the given index paths"
|
||||||
|
batched = False
|
||||||
|
# Appease static checker that doesn't understand that this is not needed
|
||||||
|
label_to_taskid = {}
|
||||||
|
taskid_to_status = {}
|
||||||
|
|
||||||
|
if isinstance(arg, tuple) and len(arg) == 3:
|
||||||
|
# allow for a batched call optimization instead of two queries
|
||||||
|
# per index path
|
||||||
|
index_paths, label_to_taskid, taskid_to_status = arg
|
||||||
|
batched = True
|
||||||
|
else:
|
||||||
|
index_paths = arg
|
||||||
|
|
||||||
for index_path in index_paths:
|
for index_path in index_paths:
|
||||||
try:
|
try:
|
||||||
task_id = find_task_id(index_path)
|
if batched:
|
||||||
status = status_task(task_id)
|
task_id = label_to_taskid[index_path]
|
||||||
|
status = taskid_to_status[task_id]
|
||||||
|
else:
|
||||||
|
# 404 is raised as `KeyError` also end up here
|
||||||
|
task_id = find_task_id(index_path)
|
||||||
|
status = status_task(task_id)
|
||||||
# status can be `None` if we're in `testing` mode
|
# status can be `None` if we're in `testing` mode
|
||||||
# (e.g. test-action-callback)
|
# (e.g. test-action-callback)
|
||||||
if not status or status.get("state") in ("exception", "failed"):
|
if not status or status.get("state") in ("exception", "failed"):
|
||||||
@ -40,7 +58,7 @@ class IndexSearch(OptimizationStrategy):
|
|||||||
|
|
||||||
return task_id
|
return task_id
|
||||||
except KeyError:
|
except KeyError:
|
||||||
# 404 will end up here and go on to the next index path
|
# go on to the next index path
|
||||||
pass
|
pass
|
||||||
|
|
||||||
return False
|
return False
|
||||||
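With this change, should_replace_task() accepts either a bare list of index paths (the old behaviour, one find_task_id()/status_task() round trip per path) or a three-element tuple carrying the prefetched lookup maps. A hedged sketch of both calling conventions; the index path, task id, and status values below are invented, and task is a stand-in placeholder rather than a real generated task:

from taskgraph.optimize.strategies import IndexSearch

strategy = IndexSearch()
task = None  # stand-in; real callers pass a taskgraph.task.Task
params = {}

# Hypothetical index path and prefetched results, shaped like the maps that
# find_task_id_batched() and status_task_batched() produce.
index_paths = ["proj-example.v2.example.latest.docker-image.linux"]
index_to_taskid = {index_paths[0]: "abc123TaskIdPlaceholder"}
taskid_to_status = {"abc123TaskIdPlaceholder": {"state": "completed"}}

# Batched form: a 3-tuple means "use these maps, make no per-path network calls".
replacement = strategy.should_replace_task(
    task, params, None, (index_paths, index_to_taskid, taskid_to_status)
)
print(replacement)  # -> abc123TaskIdPlaceholder

# Legacy form: passing a bare list keeps the old behaviour and issues one
# find_task_id()/status_task() query pair per index path (live requests):
# strategy.should_replace_task(task, params, None, index_paths)
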
@@ -193,6 +193,48 @@ def find_task_id(index_path, use_proxy=False):
     return response.json()["taskId"]


+def find_task_id_batched(index_paths, use_proxy=False):
+    """Gets the task id of multiple tasks given their respective index.
+
+    Args:
+        index_paths (List[str]): A list of task indexes.
+        use_proxy (bool): Whether to use taskcluster-proxy (default: False)
+
+    Returns:
+        Dict[str, str]: A dictionary object mapping each valid index path
+        to its respective task id.
+
+    See the endpoint here:
+        https://docs.taskcluster.net/docs/reference/core/index/api#findTasksAtIndex
+    """
+    endpoint = liburls.api(get_root_url(use_proxy), "index", "v1", "tasks/indexes")
+    task_ids = {}
+    continuation_token = None
+
+    while True:
+        response = _do_request(
+            endpoint,
+            json={
+                "indexes": index_paths,
+            },
+            params={"continuationToken": continuation_token},
+        )
+
+        response_data = response.json()
+        if not response_data["tasks"]:
+            break
+        response_tasks = response_data["tasks"]
+        if (len(task_ids) + len(response_tasks)) > len(index_paths):
+            # Sanity check
+            raise ValueError("more task ids were returned than were asked for")
+        task_ids.update((t["namespace"], t["taskId"]) for t in response_tasks)
+
+        continuationToken = response_data.get("continuationToken")
+        if continuationToken is None:
+            break
+    return task_ids
+
+
 def get_artifact_from_index(index_path, artifact_path, use_proxy=False):
     full_path = index_path + "/artifacts/" + artifact_path
     response = _do_request(get_index_url(full_path, use_proxy))
@@ -271,6 +313,49 @@ def status_task(task_id, use_proxy=False):
     return status


+def status_task_batched(task_ids, use_proxy=False):
+    """Gets the status of multiple tasks given task_ids.
+
+    In testing mode, just logs that it would have retrieved statuses.
+
+    Args:
+        task_id (List[str]): A list of task ids.
+        use_proxy (bool): Whether to use taskcluster-proxy (default: False)
+
+    Returns:
+        dict: A dictionary object as defined here:
+        https://docs.taskcluster.net/docs/reference/platform/queue/api#statuses
+    """
+    if testing:
+        logger.info(f"Would have gotten status for {len(task_ids)} tasks.")
+        return
+    endpoint = liburls.api(get_root_url(use_proxy), "queue", "v1", "tasks/status")
+    statuses = {}
+    continuation_token = None
+
+    while True:
+        response = _do_request(
+            endpoint,
+            json={
+                "taskIds": task_ids,
+            },
+            params={
+                "continuationToken": continuation_token,
+            },
+        )
+        response_data = response.json()
+        if not response_data["statuses"]:
+            break
+        response_tasks = response_data["statuses"]
+        if (len(statuses) + len(response_tasks)) > len(task_ids):
+            raise ValueError("more task statuses were returned than were asked for")
+        statuses.update((t["taskId"], t["status"]) for t in response_tasks)
+
+        continuationToken = response_data.get("continuationToken")
+        if continuationToken is None:
+            break
+    return statuses
+
+
 def state_task(task_id, use_proxy=False):
     """Gets the state of a task given a task_id.
