Mirror of https://github.com/mozilla/gecko-dev.git
Backed out 16 changesets (bug 1492664) for breaking developer artifact builds, requested by standard8 a=backout
Backed out changeset 31e500489665 (bug 1492664)
Backed out changeset f4945658d45f (bug 1492664)
Backed out changeset 6d17291b8b92 (bug 1492664)
Backed out changeset 90f3faa36137 (bug 1492664)
Backed out changeset 0b229b00818a (bug 1492664)
Backed out changeset 5eb2c77d70a9 (bug 1492664)
Backed out changeset e1ebad5d89c5 (bug 1492664)
Backed out changeset 3017e5890739 (bug 1492664)
Backed out changeset c8b7e620eabf (bug 1492664)
Backed out changeset d3dfbd848236 (bug 1492664)
Backed out changeset 5c92bb5ac895 (bug 1492664)
Backed out changeset fb7cfca6ebc3 (bug 1492664)
Backed out changeset 0c4101230d4d (bug 1492664)
Backed out changeset b93a0fcc86f3 (bug 1492664)
Backed out changeset 6dc9522ee0bf (bug 1492664)
Backed out changeset 85d7f8b330eb (bug 1492664)
parent 91d742d138
commit 2e5e28f518
@@ -146,9 +146,6 @@ tasks:
GECKO_COMMIT_MSG: {$if: 'tasks_for != "action"', then: '${push.comment}'}
HG_STORE_PATH: /builds/worker/checkouts/hg-store
TASKCLUSTER_CACHES: /builds/worker/checkouts
# someday, these will be provided by the worker - Bug 1492664
TASKCLUSTER_ROOT_URL: https://taskcluster.net
TASKCLUSTER_PROXY_URL: http://taskcluster
- $if: 'tasks_for == "action"'
then:
ACTION_TASK_GROUP_ID: '${action.taskGroupId}' # taskGroupId of the target task

@@ -45,7 +45,6 @@ mozilla.pth:third_party/python/pytest/src
mozilla.pth:third_party/python/pytoml
mozilla.pth:third_party/python/redo
mozilla.pth:third_party/python/six
mozilla.pth:third_party/python/taskcluster-urls
mozilla.pth:third_party/python/voluptuous
mozilla.pth:third_party/python/json-e
mozilla.pth:build

@@ -50,13 +50,9 @@ def REMOVED(cls):
@CommandProvider
class MachCommands(MachCommandBase):
def _root_url(self, artifactdir=None, objdir=None):
"""Generate a publicly-accessible URL for the tasks's artifacts, or an objdir path"""
if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
import taskcluster_urls
return taskcluster_urls.api(
os.environ['TASKCLUSTER_ROOT_URL'],
'queue', 'v1', 'task/{}/runs/{}/artifacts/{}'.format(
os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir))
return 'https://queue.taskcluster.net/v1/task/{}/runs/{}/artifacts/{}'.format(
os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir)
else:
return os.path.join(self.topobjdir, objdir)
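
For context: the two return paths in the hunk above name the same artifact location on the production instance; the backed-out branch just derived the hostname from TASKCLUSTER_ROOT_URL instead of hard-coding it. A minimal sketch of that convention, mirroring the api() helper in the fetch-content hunk further down (the task id, run id and artifact path here are invented for illustration):

    def queue_artifact_url(root_url, task_id, run_id, artifact):
        # taskcluster-lib-urls convention: the production root URL maps to the
        # legacy per-service hostname, anything else to <root_url>/api/<service>.
        if root_url == 'https://taskcluster.net':
            base = 'https://queue.taskcluster.net/v1'
        else:
            base = '{}/api/queue/v1'.format(root_url)
        return '{}/task/{}/runs/{}/artifacts/{}'.format(base, task_id, run_id, artifact)

    # queue_artifact_url('https://taskcluster.net', 'abc123', '0', 'public/build')
    # -> 'https://queue.taskcluster.net/v1/task/abc123/runs/0/artifacts/public/build'
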
@@ -218,7 +218,6 @@ Task group: [{task_group_id}](https://tools.taskcluster.net/groups/{task_group_i
notify_options = {}
if 'TASKCLUSTER_PROXY_URL' in os.environ:
# Until bug 1460015 is finished, use the old baseUrl style of proxy URL
base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
notify_options['baseUrl'] = '{}/notify/v1'.format(base_url)
notify = Notify(notify_options)

@@ -24,11 +24,12 @@ job-template:
docker-image: {in-tree: "lint"}
max-run-time: 900
env:
ARTIFACT_TASKID: {"task-reference": "<build>"}
# {level} gets replaced in the upload_symbols transform
SYMBOL_SECRET: "project/releng/gecko/build/level-{level}/gecko-symbol-upload"
run:
using: mach
mach: {artifact-reference: "python toolkit/crashreporter/tools/upload_symbols.py <build/public/build/target.crashreporter-symbols-full.zip>"}
mach: python toolkit/crashreporter/tools/upload_symbols.py https://queue.taskcluster.net/v1/task/${ARTIFACT_TASKID}/artifacts/public/build/target.crashreporter-symbols-full.zip
sparse-profile: upload-symbols
optimization:
only-if-dependencies-run: null

@@ -49,9 +49,8 @@ RUN apt-get update && \
COPY setup_packages.sh /usr/local/sbin/
COPY cloud-mirror-workaround.sh /usr/local/sbin/
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";' > /etc/apt/apt.conf.d/99cloud-mirror-workaround && \
apt-get update && \
apt-get install \
@@ -1,17 +1,5 @@
#!/bin/sh
TASKCLUSTER_ROOT_URL=$1
shift

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

for task in "$@"; do
echo "adding package source $queue_base/task/$task/artifacts/public/build/"
echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
done
@@ -8,9 +8,8 @@ VOLUME /builds/worker/tooltool-cache
ENV XZ_OPT=-T0
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
# %ARG ARCH
RUN dpkg --add-architecture $ARCH

@@ -6,8 +6,7 @@ VOLUME /builds/worker/checkouts
VOLUME /builds/worker/workspace
VOLUME /builds/worker/tooltool-cache
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
apt-get update && \
apt-get install cmake

@@ -11,13 +11,6 @@ mkdir a b
# implemented, it's better to first manually extract the data.
# Plus dmg files are not supported yet.

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

case "$ORIG_URL" in
*/target.zip|*/target.apk)
curl -sL "$ORIG_URL" > a.zip

@@ -33,7 +26,7 @@ case "$ORIG_URL" in
# We don't have mach available to call mach artifact toolchain.
# This is the trivial equivalent for those toolchains we use here.
for t in $MOZ_TOOLCHAINS; do
curl -sL $queue_base/task/${t#*@}/artifacts/${t%@*} | tar -Jxf -
curl -sL https://queue.taskcluster.net/v1/task/${t#*@}/artifacts/${t%@*} | tar -Jxf -
done
for tool in lipo otool; do
ln -s /builds/worker/cctools/bin/x86_64-apple-darwin*-$tool bin/$tool
@@ -9,14 +9,8 @@ test "$SHA384_SIGNING_CERT"
ARTIFACTS_DIR="/home/worker/artifacts"
mkdir -p "$ARTIFACTS_DIR"

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
"https://queue.taskcluster.net/v1/task/$TASK_ID"

# auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/
# -> bucket of tc-gp-private-1d-us-east-1, path of releng/mbsdiff-cache/

@@ -33,8 +27,7 @@ then
test "${AWS_BUCKET_NAME}"
set +x # Don't echo these.
# Until bug 1460015 is finished, use baseUrl-style proxy URLs
secret_url="${TASKCLUSTER_PROXY_URL}/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
secret_url="taskcluster/auth/v1/aws/s3/read-write/${AWS_BUCKET_NAME}/${S3_PATH}"
AUTH=$(curl "${secret_url}")
AWS_ACCESS_KEY_ID=$(echo "${AUTH}" | jq -r '.credentials.accessKeyId')
AWS_SECRET_ACCESS_KEY=$(echo "${AUTH}" | jq -r '.credentials.secretAccessKey')
@@ -35,19 +35,16 @@ log = logging.getLogger(__name__)
ddstats = ThreadStats(namespace='releng.releases.partials')

ROOT_URL = os.environ['TASKCLUSTER_ROOT_URL']
QUEUE_PREFIX = ("https://queue.taskcluster.net/"
if ROOT_URL == 'https://taskcluster.net'
else ROOT_URL + '/api/queue/')
ALLOWED_URL_PREFIXES = (
"http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
"http://download.cdn.mozilla.net/pub/firefox/nightly/",
"https://mozilla-nightly-updates.s3.amazonaws.com",
"https://queue.taskcluster.net/",
"http://ftp.mozilla.org/",
"http://download.mozilla.org/",
"https://archive.mozilla.org/",
"http://archive.mozilla.org/",
QUEUE_PREFIX,
"https://queue.taskcluster.net/v1/task/",
)
STAGING_URL_PREFIXES = (
"http://ftp.stage.mozaws.net/",
@@ -51,25 +51,18 @@ fi
export ARTIFACTS_DIR="/home/worker/artifacts"
mkdir -p "$ARTIFACTS_DIR"

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

# Get Arcanist API token

if [ -n "${TASK_ID}" ]
then
curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
"https://queue.taskcluster.net/v1/task/$TASK_ID"
ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
fi
if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
then
set +x # Don't echo these
# Until bug 1460015 is finished, use baseUrl-style proxy URLs
secrets_url="${TASKCLUSTER_PROXY_URL}/secrets/v1/secret/${ARC_SECRET}"
secrets_url="http://taskcluster/secrets/v1/secret/${ARC_SECRET}"
SECRET=$(curl "${secrets_url}")
TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.
@@ -95,15 +95,6 @@ BLOCKLIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${BLOCKLIST_DIFF_ARTIFACT:-"blocklist.
REMOTE_SETTINGS_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${REMOTE_SETTINGS_DIFF_ARTIFACT:-"remote-settings.diff"}"
SUFFIX_LIST_DIFF_ARTIFACT="${ARTIFACTS_DIR}/${SUFFIX_LIST_DIFF_ARTIFACT:-"effective_tld_names.diff"}"

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
index_base='https://index.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
index_base="$TASKCLUSTER_ROOT_URL/api/index/v1"
fi

# Get the current in-tree version for a code branch.
function get_version {
VERSION_REPO=$1

@@ -155,11 +146,11 @@ function download_shared_artifacts_from_tc {
# Download everything we need to run js with xpcshell
echo "INFO: Downloading all the necessary pieces from the taskcluster index..."
TASKID_URL="$index_base/task/gecko.v2.${REPODIR}.latest.${PRODUCT}.linux64-opt"
TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.${REPODIR}.latest.${PRODUCT}.linux64-opt"
if [ "${USE_MC}" == "true" ]; then
TASKID_URL="$index_base/task/gecko.v2.mozilla-central.latest.${PRODUCT}.linux64-opt"
TASKID_URL="https://index.taskcluster.net/v1/task/gecko.v2.mozilla-central.latest.${PRODUCT}.linux64-opt"
fi
${WGET} -O ${TASKID_FILE} "${TASKID_URL}"
${WGET} -O ${TASKID_FILE} ${TASKID_URL}
INDEX_TASK_ID="$($JQ -r '.taskId' ${TASKID_FILE})"
if [ -z "${INDEX_TASK_ID}" ]; then
echo "Failed to look up taskId at ${TASKID_URL}"

@@ -169,16 +160,16 @@ function download_shared_artifacts_from_tc {
fi

TASKSTATUS_FILE="taskstatus.json"
STATUS_URL="$queue_base/task/${INDEX_TASK_ID}/status"
STATUS_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/status"
${WGET} -O "${TASKSTATUS_FILE}" "${STATUS_URL}"
LAST_RUN_INDEX=$(($(jq '.status.runs | length' ${TASKSTATUS_FILE}) - 1))
echo "INFO: Examining run number ${LAST_RUN_INDEX}"

BROWSER_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
BROWSER_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${BROWSER_ARCHIVE}"
echo "INFO: ${WGET} ${BROWSER_ARCHIVE_URL}"
${WGET} "${BROWSER_ARCHIVE_URL}"

TESTS_ARCHIVE_URL="$queue_base/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
TESTS_ARCHIVE_URL="https://queue.taskcluster.net/v1/task/${INDEX_TASK_ID}/runs/${LAST_RUN_INDEX}/artifacts/public/build/${TESTS_ARCHIVE}"
echo "INFO: ${WGET} ${TESTS_ARCHIVE_URL}"
${WGET} "${TESTS_ARCHIVE_URL}"
}
@@ -19,25 +19,18 @@ fi
export ARTIFACTS_DIR="/home/worker/artifacts"
mkdir -p "$ARTIFACTS_DIR"

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

# Get Arcanist API token

if [ -n "${TASK_ID}" ]
then
curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json "$queue_base/task/$TASK_ID"
curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
"https://queue.taskcluster.net/v1/task/$TASK_ID"
ARC_SECRET=$(jq -r '.scopes[] | select(contains ("arc-phabricator-token"))' /home/worker/task.json | awk -F: '{print $3}')
fi
if [ -n "${ARC_SECRET}" ] && getent hosts taskcluster
then
set +x # Don't echo these
# Until bug 1460015 is finished, use the old, baseUrl-style proxy URLs
secrets_url="${TASKCLUSTER_PROXY_URL}/secrets/v1/secret/${ARC_SECRET}"
secrets_url="http://taskcluster/secrets/v1/secret/${ARC_SECRET}"
SECRET=$(curl "${secrets_url}")
TOKEN=$(echo "${SECRET}" | jq -r '.secret.token')
elif [ -n "${ARC_TOKEN}" ] # Allow for local testing.

@@ -9,8 +9,7 @@ VOLUME /builds/worker/tooltool-cache
ENV XZ_OPT=-T0
# %ARG DOCKER_IMAGE_PACKAGES
# %ARG TASKCLUSTER_ROOT_URL
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
RUN apt-get update && \
apt-get install \
@@ -70,6 +70,6 @@ label. At this phase, the edges in the task graph diverge from the
``task.dependencies`` attributes, as the latter may contain dependencies
outside of the taskgraph (for replacement tasks).

As a side-effect, this phase also expands all ``{"task-reference": ".."}`` and
``{"artifact-reference": ".."}`` objects within the task definitions.
As a side-effect, this phase also expands all ``{"task-reference": ".."}``
objects within the task definitions.

@@ -176,11 +176,6 @@ using simple parameterized values, as follows:
Multiple labels may be substituted in a single string, and ``<<>`` can be
used to escape a literal ``<``.

``{"artifact-reference": "..<dep-name/artifact/name>.."}``
Similar to a ``task-reference``, but this substitutes a URL to the queue's
``getLatestArtifact`` API method (for which a GET will redirect to the
artifact itself).

.. _taskgraph-graph-config:

Graph Configuration
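
To make the removed artifact-reference documentation concrete, the parameterization tests touched later in this backout exercise exactly this substitution; a rough before/after sketch using the test fixture's root URL (https://tc-tests.localhost) with a made-up dependency name and task id:

    # Before resolution, as written in a task definition:
    fragment = {'command': {'artifact-reference':
        'fetch <build/public/build/target.zip> and unpack it'}}

    # After task ids are known (build -> 'tid1'), the wrapper collapses to a
    # plain string holding the queue's getLatestArtifact URL for that artifact:
    resolved = {'command':
        'fetch https://tc-tests.localhost/api/queue/v1'
        '/task/tid1/artifacts/public/build/target.zip and unpack it'}
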
@@ -202,10 +202,9 @@ this common functionality. They expect a "signing description", and produce a
task definition. The schema for a signing description is defined at the top of
``signing.py``, with copious comments.

In particular you define a set of upstream artifact urls (that point at the
dependent task) and can optionally provide a dependent name (defaults to build)
for use in ``task-reference``/``artifact-reference``. You also need to provide
the signing formats to use.
In particular you define a set of upstream artifact urls (that point at the dependent
task) and can optionally provide a dependent name (defaults to build) for use in
task-reference. You also need to provide the signing formats to use.

More Detail
-----------

@@ -182,7 +182,7 @@ class MachCommands(MachCommandBase):
import taskgraph.decision
try:
self.setup()
self.setup_logging()
return taskgraph.decision.taskgraph_decision(options)
except Exception:
traceback.print_exc()

@@ -221,7 +221,7 @@ class MachCommands(MachCommandBase):
from the hooks service on a regular basis."""
import taskgraph.cron
try:
self.setup()
self.setup_logging()
return taskgraph.cron.taskgraph_cron(options)
except Exception:
traceback.print_exc()

@@ -234,7 +234,7 @@ class MachCommands(MachCommandBase):
def action_callback(self, **options):
import taskgraph.actions
try:
self.setup()
self.setup_logging()
# the target task for this action (or null if it's a group action)
task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))

@@ -287,8 +287,7 @@ class MachCommands(MachCommandBase):
raise Exception("unknown filename {}".format(filename))
try:
self.setup()
self.setup_logging()
task_id = options['task_id']
if options['input']:

@@ -313,7 +312,7 @@ class MachCommands(MachCommandBase):
traceback.print_exc()
sys.exit(1)
def setup(self, quiet=False, verbose=True):
def setup_logging(self, quiet=False, verbose=True):
"""
Set up Python logging for all loggers, sending results to stderr (so
that command output can be redirected easily) and adding the typical

@@ -333,20 +332,15 @@ class MachCommands(MachCommandBase):
# all of the taskgraph logging is unstructured logging
self.log_manager.enable_unstructured()

# Ensure that TASKCLUSTER_ROOT_URL is set
import taskgraph
taskgraph.set_root_url_env()

def show_taskgraph(self, graph_attr, options):
import taskgraph.parameters
import taskgraph.generator
import taskgraph

if options['fast']:
taskgraph.fast = True

try:
self.setup(quiet=options['quiet'], verbose=options['verbose'])
self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
parameters.check()

@@ -405,7 +399,7 @@ class MachCommands(MachCommandBase):
import taskgraph.actions
try:
self.setup(quiet=options['quiet'], verbose=options['verbose'])
self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
parameters.check()
@@ -31,16 +31,9 @@ make -j$(getconf _NPROCESSORS_ONLN)
strip dmg/dmg hfs/hfsplus
cp dmg/dmg hfs/hfsplus $STAGE

# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi

cat >$STAGE/README<<EOF
Built from ${LIBDMG_REPOSITORY} rev `git rev-parse ${LIBDMG_REV}`.
Source is available as a taskcluster artifact:
$queue_base/task/$TASK_ID/artifacts/public/libdmg-hfsplus.tar.xz
https://queue.taskcluster.net/v1/task/$TASK_ID/artifacts/public/libdmg-hfsplus.tar.xz
EOF
tar cf - -C $WORKSPACE `basename $STAGE` | xz > $UPLOAD_DIR/dmg.tar.xz
@@ -26,6 +26,11 @@ except ImportError:
zstandard = None

PUBLIC_ARTIFACT_URL = ('https://queue.taskcluster.net/v1/task/{task}/artifacts/'
'{artifact}')
PRIVATE_ARTIFACT_URL = ('http://taskcluster/queue/v1/task/{task}/artifacts/'
'{artifact}')

CONCURRENCY = multiprocessing.cpu_count()

@@ -372,16 +377,6 @@ def command_static_url(args):
raise

def api(root_url, service, version, path):
# taskcluster-lib-urls is not available when this script runs, so
# simulate its behavior:
if root_url == 'https://taskcluster.net':
return 'https://{service}.taskcluster.net/{version}/{path}'.format(
service=service, version=version, path=path)
return 'https://{root_url}/api/{service}/{version}/{path}'.format(
root_url=root_url, service=service, version=version, path=path)

def command_task_artifacts(args):
fetches = json.loads(os.environ['MOZ_FETCHES'])
downloads = []

@@ -390,16 +385,11 @@ def command_task_artifacts(args):
if 'dest' in fetch:
extdir = extdir.joinpath(fetch['dest'])
extdir.mkdir(parents=True, exist_ok=True)
root_url = os.environ['TASKCLUSTER_ROOT_URL']
if fetch['artifact'].startswith('public/'):
path = 'task/{task}/artifacts/{artifact}'.format(
task=fetch['task'], artifact=fetch['artifact'])
url = api(root_url, 'queue', 'v1', path)
url = PUBLIC_ARTIFACT_URL.format(task=fetch['task'],
artifact=fetch['artifact'])
else:
# Until bug 1460015 is finished, use the old baseUrl style proxy URLs
url = ('{proxy_url}/queue/v1/task/{task}/artifacts/{artifact}').format(
proxy_url=os.environ['TASKCLUSTER_PROXY_URL'],
task=fetch['task'],
url = PRIVATE_ARTIFACT_URL.format(task=fetch['task'],
artifact=fetch['artifact'])
downloads.append((url, extdir, fetch['extract']))
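
For orientation, the branch above chooses between two URL shapes depending on whether the fetched artifact is public; a sketch of what a single MOZ_FETCHES entry resolves to (the task id and artifact name are invented, and the proxy address is whatever the worker exports):

    import os

    fetch = {'task': 'abc123', 'artifact': 'public/build/target.zip', 'extract': True}

    # Public artifacts are fetched straight from the queue:
    public_url = ('https://queue.taskcluster.net/v1/task/{task}/artifacts/{artifact}'
                  .format(task=fetch['task'], artifact=fetch['artifact']))

    # Private artifacts go through taskcluster-proxy so the task's scopes
    # authorize the download:
    private_url = ('{proxy}/queue/v1/task/{task}/artifacts/{artifact}'
                   .format(proxy=os.environ.get('TASKCLUSTER_PROXY_URL', 'http://taskcluster'),
                           task=fetch['task'], artifact=fetch['artifact']))
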
@@ -5,7 +5,6 @@
from __future__ import absolute_import, print_function, unicode_literals

import os
import logging

GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))

@@ -19,21 +18,3 @@ MAX_DEPENDENCIES = 99
# This is normally switched on via the --fast/-F flag to `mach taskgraph`
# Currently this skips toolchain task optimizations and schema validation
fast = False

# Default rootUrl to use for command-line invocations
PRODUCTION_TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'

def set_root_url_env():
"""Ensure that TASKCLUSTER_ROOT_URL is set, defaulting when run outside of a task."""
logger = logging.getLogger('set_root_url_env')

if 'TASKCLUSTER_ROOT_URL' not in os.environ:
if 'TASK_ID' in os.environ:
raise RuntimeError('TASKCLUSTER_ROOT_URL must be set when running in a task')
else:
logger.info('Setting TASKCLUSTER_ROOT_URL to default value (Firefox CI production)')
os.environ['TASKCLUSTER_ROOT_URL'] = PRODUCTION_TASKCLUSTER_ROOT_URL
logger.info('Running in Taskcluster instance {}{}'.format(
os.environ['TASKCLUSTER_ROOT_URL'],
' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
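
The removed helper above encodes a small decision table: inside a task (TASK_ID present) the root URL must be supplied explicitly, while a developer running mach locally silently falls back to the production instance. A condensed sketch of that behaviour, reading the same environment variables:

    import os

    def effective_root_url(environ=os.environ):
        if 'TASKCLUSTER_ROOT_URL' in environ:
            return environ['TASKCLUSTER_ROOT_URL']
        if 'TASK_ID' in environ:
            # Workers are expected to provide the value inside a task.
            raise RuntimeError('TASKCLUSTER_ROOT_URL must be set when running in a task')
        # Local command-line invocation: default to the Firefox CI production instance.
        return 'https://taskcluster.net'
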
@@ -10,15 +10,33 @@ import concurrent.futures as futures
import logging
import os

from taskgraph.util.taskcluster import list_task_group, cancel_task
from taskgraph.util.taskcluster import get_session, cancel_task
from .registry import register_callback_action

# the maximum number of parallel cancelTask calls to make
CONCURRENCY = 50

base_url = 'https://queue.taskcluster.net/v1/{}'

logger = logging.getLogger(__name__)

def list_group(task_group_id, session):
params = {}
while True:
url = base_url.format('task-group/{}/list'.format(task_group_id))
response = session.get(url, stream=True, params=params)
response.raise_for_status()
response = response.json()
for task in [t['status'] for t in response['tasks']]:
if task['state'] in ['running', 'pending', 'unscheduled']:
yield task['taskId']
if response.get('continuationToken'):
params = {'continuationToken': response.get('continuationToken')}
else:
break

@register_callback_action(
title='Cancel All',
name='cancel-all',

@@ -33,11 +51,12 @@ logger = logging.getLogger(__name__)
context=[]
)
def cancel_all_action(parameters, graph_config, input, task_group_id, task_id, task):
session = get_session()
own_task_id = os.environ.get('TASK_ID', '')
with futures.ThreadPoolExecutor(CONCURRENCY) as e:
cancels_jobs = [
e.submit(cancel_task, t, use_proxy=True)
for t in list_task_group(task_group_id) if t != own_task_id
for t in list_group(task_group_id, session) if t != own_task_id
]
for job in cancels_jobs:
job.result()
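
The reinstated list_group above pages through the queue's listTaskGroup results by echoing continuationToken back as a query parameter until the service stops returning one. A minimal caller, assuming a plain requests session in place of taskgraph's get_session() and a placeholder group id:

    import requests

    session = requests.Session()
    cancellable = list(list_group('some-task-group-id', session))
    print(len(cancellable), 'tasks still running, pending or unscheduled')
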
@@ -8,8 +8,6 @@ from __future__ import absolute_import, print_function, unicode_literals
import logging
import re
import os
import taskcluster_urls

from .util import (
create_tasks,

@@ -155,9 +153,7 @@ def create_interactive_action(parameters, graph_config, input, task_group_id, ta
return

info = {
'url': taskcluster_urls.ui(
os.environ['TASKCLUSTER_ROOT_URL'],
'tasks/{}/connect'.format(taskId)),
'url': 'https://tools.taskcluster.net/tasks/{}/connect'.format(taskId),
'label': label,
'revision': parameters['head_rev'],
'repo': parameters['head_repository'],

@@ -11,11 +11,10 @@ locally, so they should be limited to changes that do not modify the meaning of
the graph.
"""

# Note that the translation of `{'task-reference': '..'}` and
# `artifact-reference` are handled in the optimization phase (since
# optimization involves dealing with taskIds directly). Similarly,
# `{'relative-datestamp': '..'}` is handled at the last possible moment during
# task creation.
# Note that the translation of `{'task-reference': '..'}` is handled in the
# optimization phase (since optimization involves dealing with taskIds
# directly). Similarly, `{'relative-datestamp': '..'}` is handled at the last
# possible moment during task creation.

from __future__ import absolute_import, print_function, unicode_literals

@@ -10,8 +10,6 @@ import stat
import tarfile
import tempfile
import unittest
import mock
import taskcluster_urls as liburls

from taskgraph.util import docker
from mozunit import main, MockedOpen

@@ -20,7 +18,6 @@ from mozunit import main, MockedOpen
MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH

@mock.patch.dict('os.environ', {'TASKCLUSTER_ROOT_URL': liburls.test_root_url()})
class TestDocker(unittest.TestCase):

def test_generate_context_hash(self):

@@ -6,8 +6,6 @@ from __future__ import absolute_import, print_function, unicode_literals
import unittest
import datetime
import mock
import os

from mozunit import main
from taskgraph.util.parameterization import (

@@ -90,46 +88,5 @@ class TestTaskRefs(unittest.TestCase):
)

class TestArtifactRefs(unittest.TestCase):

def do(self, input, output):
taskid_for_edge_name = {'edge%d' % n: 'tid%d' % n for n in range(1, 4)}
with mock.patch.dict(os.environ, {'TASKCLUSTER_ROOT_URL': 'https://tc-tests.localhost'}):
self.assertEqual(resolve_task_references('subject', input, taskid_for_edge_name),
output)

def test_in_list(self):
"resolve_task_references resolves artifact references in a list"
self.do(
{'in-a-list': [
'stuff', {'artifact-reference': '<edge1/public/foo/bar>'}]},
{'in-a-list': [
'stuff', 'https://tc-tests.localhost/api/queue/v1'
'/task/tid1/artifacts/public/foo/bar']})

def test_in_dict(self):
"resolve_task_references resolves artifact references in a dict"
self.do(
{'in-a-dict':
{'stuff': {'artifact-reference': '<edge2/public/bar/foo>'}}},
{'in-a-dict':
{'stuff': 'https://tc-tests.localhost/api/queue/v1'
'/task/tid2/artifacts/public/bar/foo'}})

def test_in_string(self):
"resolve_task_references resolves artifact references embedded in a string"
self.do(
{'stuff': {'artifact-reference': '<edge1/public/filename> and <edge2/public/bar>'}},
{'stuff': 'https://tc-tests.localhost/api/queue/v1'
'/task/tid1/artifacts/public/filename and '
'https://tc-tests.localhost/api/queue/v1/task/tid2/artifacts/public/bar'})

def test_invalid(self):
"resolve_task_references ignores badly-formatted artifact references"
for inv in ['<edge1>', 'edge1/foo>', '<edge1>/foo', '<edge1>foo']:
resolved = resolve_task_references('subject', {'artifact-reference': inv}, {})
self.assertEqual(resolved, inv)

if __name__ == '__main__':
main()
@@ -17,13 +17,18 @@ from taskgraph.util.scriptworker import (
get_balrog_server_scope, get_worker_type_for_scope
)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

balrog_description_schema = schema.extend({
# unique label to describe this balrog task, defaults to balrog-{dep.label}
Optional('label'): basestring,

@@ -7,7 +7,7 @@ Transform the beetmover task into an actual task description.
from __future__ import absolute_import, print_function, unicode_literals

from voluptuous import Optional, Required
from voluptuous import Any, Optional, Required

from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence

@@ -119,6 +119,11 @@ task_description_schema = {str(k): v for k, v in task_description_schema.schema.
transforms = TransformSequence()

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,

@@ -18,13 +18,17 @@ from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
get_beetmover_bucket_scope,
get_worker_type_for_scope,
should_use_artifact_map)
from voluptuous import Optional, Required
from voluptuous import Any, Optional, Required
from taskgraph.transforms.task import task_description_schema

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Required('attributes'): {basestring: object},

@@ -12,12 +12,16 @@ from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.beetmover import craft_release_properties
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,

@@ -15,12 +15,16 @@ from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
get_beetmover_action_scope,
get_worker_type_for_scope)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,

@@ -8,17 +8,14 @@ Transform the beetmover-push-to-release task into a task description.
from __future__ import absolute_import, print_function, unicode_literals

from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import (
Schema,
taskref_or_string,
)
from taskgraph.util.schema import Schema
from taskgraph.util.scriptworker import (
get_beetmover_bucket_scope, add_scope_prefix,
get_worker_type_for_scope,
)
from taskgraph.transforms.job import job_description_schema
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings

@@ -26,6 +23,10 @@ task_description_schema = {str(k): v for k, v in task_description_schema.schema.
job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_push_to_release_description_schema = Schema({
Required('name'): basestring,
Required('product'): basestring,
@@ -19,7 +19,7 @@ from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
get_worker_type_for_scope)
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

import logging
import re

@@ -146,6 +146,11 @@ UPSTREAM_ARTIFACT_SIGNED_MAR_PATHS = [
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,

@@ -39,6 +39,11 @@ logger = logging.getLogger(__name__)
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the unsigned things
Required('depname', default='build'): basestring,

@@ -18,12 +18,16 @@ from taskgraph.util.scriptworker import (generate_beetmover_artifact_map,
get_worker_type_for_scope,
should_use_artifact_map)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

beetmover_checksums_description_schema = schema.extend({
Required('depname', default='build'): basestring,
Optional('label'): basestring,

@@ -16,12 +16,16 @@ from taskgraph.util.scriptworker import (
add_scope_prefix,
)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

checksums_signing_description_schema = schema.extend({
Required('depname', default='beetmover'): basestring,
Optional('label'): basestring,
@@ -12,7 +12,7 @@ from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import (
Schema,
)
from taskgraph.util.taskcluster import get_artifact_path
from taskgraph.util.taskcluster import get_artifact_path, get_artifact_url
from voluptuous import (
Any,
Optional,

@@ -64,7 +64,7 @@ def fill_template(config, tasks):
value = task[k]
if isinstance(value, basestring):
deps[k] = value
dep_name = k
task_id = '<{}>'.format(k)
os_hint = value
else:
index = value['index-search']

@@ -82,7 +82,7 @@ def fill_template(config, tasks):
}
yield dummy_tasks[index]
deps[index] = 'index-search-' + index
dep_name = index
task_id = '<{}>'.format(index)
os_hint = index.split('.')[-1]
if 'linux' in os_hint:
artifact = 'target.tar.bz2'

@@ -98,10 +98,8 @@ def fill_template(config, tasks):
if previous_artifact is not None and previous_artifact != artifact:
raise Exception(
'Cannot compare builds from different OSes')
urls[k] = {
'artifact-reference': '<{}/{}>'.format(
dep_name, get_artifact_path(task, artifact)),
}
url = get_artifact_url(task_id, get_artifact_path(task, artifact))
urls[k] = {'task-reference': url}
previous_artifact = artifact

taskdesc = {

@@ -111,8 +111,6 @@ def fill_template(config, tasks):
if parent:
args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])

args['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']

if not taskgraph.fast:
context_path = os.path.join('taskcluster', 'docker', definition)
context_hash = generate_context_hash(

@@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.schema import resolve_keyed_by
from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix

transforms = TransformSequence()

@@ -23,12 +24,14 @@ def add_command(config, tasks):
for upstream in task.get("dependencies", {}).keys():
if 'update-verify-config' in upstream:
final_verify_configs.append(
"<{}/public/build/update-verify.cfg>".format(upstream),
"{}update-verify.cfg".format(
get_taskcluster_artifact_prefix(task, "<{}>".format(upstream))
)
)
task['run'] = {
'using': 'run-task',
'command': {
'artifact-reference': 'cd /builds/worker/checkouts/gecko && '
'task-reference': 'cd /builds/worker/checkouts/gecko && '
'tools/update-verify/release/final-verification.sh '
+ ' '.join(final_verify_configs),
},

@@ -9,7 +9,6 @@ from __future__ import absolute_import, print_function, unicode_literals
import os
import re
import taskcluster_urls

from taskgraph.util.schema import Schema
from voluptuous import Any, Optional, Required

@@ -153,8 +152,6 @@ def docker_worker_debian_package(config, job, taskdesc):
dist=run['dist'],
)

queue_url = taskcluster_urls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', '')

# We can't depend on docker images (since docker images depend on packages),
# so we inline the whole script here.
worker['command'] = [

@@ -174,7 +171,8 @@ def docker_worker_debian_package(config, job, taskdesc):
# Add sources for packages coming from other package tasks.
'apt-get install -yyq apt-transport-https ca-certificates && '
'for task in $PACKAGES; do '
' echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" '
' echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
'/$task/artifacts/public/build/ debian/" '
'>> /etc/apt/sources.list; '
'done && '
# Install the base utilities required to build debian packages.

@@ -200,7 +198,6 @@ def docker_worker_debian_package(config, job, taskdesc):
'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
.format(
queue_url=queue_url,
package=package,
snapshot=run['snapshot'],
dist=run['dist'],
@@ -8,17 +8,14 @@ Support for running mach tasks (via run-task)
from __future__ import absolute_import, print_function, unicode_literals

from taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
from taskgraph.util.schema import (
Schema,
taskref_or_string,
)
from taskgraph.util.schema import Schema
from voluptuous import Required, Optional, Any

mach_schema = Schema({
Required('using'): 'mach',

# The mach command (omitting `./mach`) to run
Required('mach'): taskref_or_string,
Required('mach'): basestring,

# The sparse checkout profile to use. Value is the filename relative to the
# directory where sparse profiles are defined (build/sparse-profiles/).

@@ -44,16 +41,8 @@ defaults = {
def configure_mach(config, job, taskdesc):
run = job['run']

command_prefix = 'cd $GECKO_PATH && ./mach '
mach = run['mach']
if isinstance(mach, dict):
ref, pattern = next(iter(mach.items()))
command = {ref: command_prefix + pattern}
else:
command = command_prefix + mach

# defer to the run_task implementation
run['command'] = command
run['command'] = 'cd $GECKO_PATH && ./mach {mach}'.format(**run)
run['using'] = 'run-task'
del run['mach']
configure_taskdesc_for_run(config, job, taskdesc, job['worker']['implementation'])
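
The removed configure_mach branch accepts either a bare string or a one-key reference dict for mach: and re-wraps the reference around the assembled command. A rough sketch of the two cases (the plain-string example is arbitrary; the dict reuses the upload_symbols value from the earlier job-template hunk):

    def build_command(mach):
        # Keep the task-/artifact-reference wrapper, if any, and prefix the
        # mach invocation either way.
        prefix = 'cd $GECKO_PATH && ./mach '
        if isinstance(mach, dict):
            ref, pattern = next(iter(mach.items()))
            return {ref: prefix + pattern}
        return prefix + mach

    build_command('lint -l flake8')
    # -> 'cd $GECKO_PATH && ./mach lint -l flake8'
    build_command({'artifact-reference': 'python toolkit/crashreporter/tools/'
                   'upload_symbols.py <build/public/build/target.crashreporter-symbols-full.zip>'})
    # -> {'artifact-reference': 'cd $GECKO_PATH && ./mach python toolkit/...'}
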
@@ -18,7 +18,6 @@ from taskgraph.transforms.base import (
from taskgraph.util.schema import (
optionally_keyed_by,
resolve_keyed_by,
taskref_or_string,
)
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.taskcluster import get_artifact_prefix

@@ -36,6 +35,11 @@ def _by_platform(arg):
return optionally_keyed_by('build-platform', arg)

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}

@@ -9,7 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.partials import get_balrog_platform_name, get_builds
from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.taskcluster import get_taskcluster_artifact_prefix, get_artifact_prefix

import logging
logger = logging.getLogger(__name__)

@@ -76,20 +76,19 @@ def make_task_description(config, jobs):
if not builds:
continue

dep_task_ref = '<{}>'.format(dependent_kind)

extra = {'funsize': {'partials': list()}}
update_number = 1

locale_suffix = ''
if locale:
locale_suffix = '{}/'.format(locale)
artifact_path = "<{}/{}/{}target.complete.mar>".format(
dependent_kind, get_artifact_prefix(dep_job), locale_suffix,
artifact_path = "{}{}".format(
get_taskcluster_artifact_prefix(dep_job, dep_task_ref, locale=locale),
'target.complete.mar'
)
for build in sorted(builds):
partial_info = {
'locale': build_locale,
'from_mar': builds[build]['mar_url'],
'to_mar': {'artifact-reference': artifact_path},
'to_mar': {'task-reference': artifact_path},
'platform': get_balrog_platform_name(dep_th_platform),
'branch': config.params['project'],
'update_number': update_number,

@@ -20,13 +20,18 @@ from taskgraph.util.taskcluster import get_artifact_prefix
from taskgraph.util.platforms import archive_format, executable_extension
from taskgraph.util.workertypes import worker_type_implementation
from taskgraph.transforms.job import job_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

packaging_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the signed things
Required('depname', default='build'): basestring,

@@ -22,7 +22,7 @@ from taskgraph.util.platforms import archive_format, executable_extension
from taskgraph.util.workertypes import worker_type_implementation
from taskgraph.transforms.task import task_description_schema
from taskgraph.transforms.repackage import PACKAGE_FORMATS
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings

@@ -33,6 +33,11 @@ def _by_platform(arg):
return optionally_keyed_by('build-platform', arg)

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

packaging_description_schema = schema.extend({
# depname is used in taskref's to identify the taskID of the signed things
Required('depname', default='build'): basestring,

@@ -10,14 +10,13 @@ from __future__ import absolute_import, print_function, unicode_literals
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.schema import taskref_or_string
from taskgraph.util.scriptworker import (
add_scope_prefix,
get_signing_cert_scope_per_platform,
get_worker_type_for_scope,
)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not

@@ -26,6 +25,11 @@ task_description_schema = {str(k): v for k, v in task_description_schema.schema.
transforms = TransformSequence()

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

signing_description_schema = schema.extend({
# Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py
# because this is passed directly into the signingscript worker

@@ -16,12 +16,16 @@ from taskgraph.util.scriptworker import (
add_scope_prefix,
)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring})

checksums_signing_description_schema = schema.extend({
Required('depname', default='beetmover'): basestring,
Optional('label'): basestring,

@@ -27,6 +27,8 @@ from voluptuous import (
Schema,
)

ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'

job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}

source_test_description_schema = Schema({
@@ -27,7 +27,6 @@ from taskgraph.util.schema import (
optionally_keyed_by,
resolve_keyed_by,
OptimizationSchema,
taskref_or_string,
)
from taskgraph.util.scriptworker import (
BALROG_ACTIONS,

@@ -48,6 +47,12 @@ def _run_task_suffix():
return hash_path(RUN_TASK)[0:20]

# shortcut for a string where task references are allowed
taskref_or_string = Any(
basestring,
{Required('task-reference'): basestring},
)

# A task description is a general description of a TaskCluster task
task_description_schema = Schema({
# the label for this task

@@ -494,11 +499,6 @@ def build_docker_worker_payload(config, task, task_def):
else:
raise Exception("unknown docker image type")

# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
worker['env']['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']

features = {}

if worker.get('relengapi-proxy'):

@@ -531,11 +531,6 @@ def build_docker_worker_payload(config, task, task_def):
else:
worker['env']['SCCACHE_DISABLE'] = '1'

# this will soon be provided directly by the worker:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
if features.get('taskclusterProxy'):
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'

capabilities = {}

for lo in 'audio', 'video':

@@ -768,11 +763,6 @@ def build_generic_worker_payload(config, task, task_def):
env = worker.get('env', {})

# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
env['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']

if task.get('needs-sccache'):
env['USE_SCCACHE'] = '1'
# Disable sccache idle shutdown.

@@ -827,9 +817,7 @@ def build_generic_worker_payload(config, task, task_def):
if worker.get('taskcluster-proxy'):
features['taskclusterProxy'] = True
# this will soon be provided directly by the worker:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'

if worker.get('run-as-administrator', False):
features['runAsAdministrator'] = True

@@ -1327,11 +1315,6 @@ def build_always_optimized_payload(config, task, task_def):
})
def build_macosx_engine_payload(config, task, task_def):
worker = task['worker']

# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant
worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = os.environ['TASKCLUSTER_ROOT_URL']

artifacts = map(lambda artifact: {
'name': artifact['name'],
'path': artifact['path'],

@@ -208,7 +208,6 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
object."""
archive_files = {}
replace = []
content = []

context_dir = os.path.join(topsrcdir, context_dir)

@@ -220,6 +219,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
archive_files[archive_path] = source_path

# Parse Dockerfile for special syntax of extra files to include.
content = []
with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
for line in fh:
if line.startswith('# %ARG'):
@ -7,21 +7,20 @@ from __future__ import absolute_import, print_function, unicode_literals
import re

from taskgraph.util.time import json_time_from_now
from taskgraph.util.taskcluster import get_artifact_url

TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>')
ARTIFACT_REFERENCE_PATTERN = re.compile('<([^/]+)/([^>]+)>')


def _recurse(val, param_fns):
def _recurse(val, param_name, param_fn):
    param_keys = [param_name]

    def recurse(val):
        if isinstance(val, list):
            return [recurse(v) for v in val]
        elif isinstance(val, dict):
            if len(val) == 1:
                for param_key, param_fn in param_fns.items():
                    if val.keys() == [param_key]:
                        return param_fn(val[param_key])
            if val.keys() == param_keys:
                return param_fn(val[param_name])
            else:
                return {k: recurse(v) for k, v in val.iteritems()}
        else:
            return val
@ -30,19 +29,12 @@ def _recurse(val, param_fns):

def resolve_timestamps(now, task_def):
    """Resolve all instances of `{'relative-datestamp': '..'}` in the given task definition"""
    return _recurse(task_def, {
        'relative-datestamp': lambda v: json_time_from_now(v, now),
    })
    return _recurse(task_def, 'relative-datestamp', lambda v: json_time_from_now(v, now))


def resolve_task_references(label, task_def, dependencies):
    """Resolve all instances of
      {'task-reference': '..<..>..'}
    and
      {'artifact-reference`: '..<dependency/artifact/path>..'}
    in the given task definition, using the given dependencies"""

    def task_reference(val):
    """Resolve all instances of `{'task-reference': '..<..>..'}` in the given task
    definition, using the given dependencies"""
        def repl(match):
            key = match.group(1)
            try:
@ -53,24 +45,4 @@ def resolve_task_references(label, task_def, dependencies):
                return key
            raise KeyError("task '{}' has no dependency named '{}'".format(label, key))

        return TASK_REFERENCE_PATTERN.sub(repl, val)

    def artifact_reference(val):
        def repl(match):
            dependency, artifact_name = match.group(1, 2)

            try:
                task_id = dependencies[dependency]
            except KeyError:
                raise KeyError("task '{}' has no dependency named '{}'".format(label, dependency))

            assert artifact_name.startswith('public/'), \
                "artifact-reference only supports public artifacts, not `{}`".format(artifact_name)
            return get_artifact_url(task_id, artifact_name)

        return ARTIFACT_REFERENCE_PATTERN.sub(repl, val)

    return _recurse(task_def, {
        'task-reference': task_reference,
        'artifact-reference': artifact_reference,
    })
    return _recurse(task_def, 'task-reference', lambda v: TASK_REFERENCE_PATTERN.sub(repl, v))
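
For readers skimming the backout, here is a minimal, self-contained sketch of how the restored single-handler `_recurse` plus `resolve_timestamps` walk a task definition and replace `{'relative-datestamp': ...}` leaves. This is not the in-tree module: it is simplified to run standalone on Python 3, and the stand-in `json_time_from_now` only understands "N days" offsets.

```python
# Sketch only: the real json_time_from_now lives in taskgraph.util.time.
from datetime import datetime, timedelta


def json_time_from_now(offset, now):
    # Stand-in: treat the offset as a whole number of days, for illustration.
    days = int(offset.split()[0])
    return (now + timedelta(days=days)).isoformat() + 'Z'


def _recurse(val, param_name, param_fn):
    param_keys = [param_name]

    def recurse(val):
        if isinstance(val, list):
            return [recurse(v) for v in val]
        elif isinstance(val, dict):
            # A one-key dict whose key is the parameter name gets resolved.
            if list(val.keys()) == param_keys:
                return param_fn(val[param_name])
            return {k: recurse(v) for k, v in val.items()}
        return val

    return recurse(val)


def resolve_timestamps(now, task_def):
    return _recurse(task_def, 'relative-datestamp',
                    lambda v: json_time_from_now(v, now))


task_def = {'expires': {'relative-datestamp': '14 days'}, 'priority': 'low'}
print(resolve_timestamps(datetime(2018, 10, 1), task_def))
# {'expires': '2018-10-15T00:00:00Z', 'priority': 'low'}
```
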
@ -234,10 +234,3 @@ OptimizationSchema = voluptuous.Any(
    # are unnecessary if the parent tasks are not run)
    {'only-if-dependencies-run': None}
)

# shortcut for a string where task references are allowed
taskref_or_string = voluptuous.Any(
    basestring,
    {voluptuous.Required('task-reference'): basestring},
    {voluptuous.Required('artifact-reference'): basestring},
)
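
As a rough illustration of what the restored `taskref_or_string` accepts, run against voluptuous directly; `str` stands in for the Python 2 `basestring`, and the reference values below are made-up examples, not real task labels or artifact paths.

```python
# Illustration of values that satisfy the restored taskref_or_string schema.
import voluptuous

taskref_or_string = voluptuous.Any(
    str,
    {voluptuous.Required('task-reference'): str},
    {voluptuous.Required('artifact-reference'): str},
)

schema = voluptuous.Schema(taskref_or_string)
schema('python build/upload.py')                                   # plain string passes through
schema({'task-reference': '<decision>'})                           # later resolved to a dependency's taskId
schema({'artifact-reference': '<build/public/build/target.zip>'})  # later resolved to an artifact URL
```
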
@ -6,18 +6,22 @@

from __future__ import absolute_import, print_function, unicode_literals

import os
import datetime
import functools
import yaml
import requests
import logging
import taskcluster_urls as liburls
from mozbuild.util import memoize
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
from taskgraph.task import Task

_PUBLIC_TC_ARTIFACT_LOCATION = \
    'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}'

_PRIVATE_TC_ARTIFACT_LOCATION = \
    'http://taskcluster/queue/v1/task/{task_id}/artifacts/{artifact_prefix}/{postfix}'

logger = logging.getLogger(__name__)

# this is set to true for `mach taskgraph action-callback --test`
@ -59,22 +63,17 @@ def _handle_artifact(path, response):


def get_artifact_url(task_id, path, use_proxy=False):
    artifact_tmpl = liburls.api(
        os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1',
        'task/{}/artifacts/{}')
    data = artifact_tmpl.format(task_id, path)
    ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
    if use_proxy:
        # Until Bug 1405889 is deployed, we can't download directly
        # from the taskcluster-proxy. Work around by using the /bewit
        # endpoint instead.
        data = ARTIFACT_URL.format(task_id, path)
        # The bewit URL is the body of a 303 redirect, which we don't
        # want to follow (which fetches a potentially large resource).
        response = _do_request(
            os.environ['TASKCLUSTER_PROXY_URL'] + '/bewit',
            data=data,
            allow_redirects=False)
        response = _do_request('http://taskcluster/bewit', data=data, allow_redirects=False)
        return response.text
    return data
    return ARTIFACT_URL.format(task_id, path)


def get_artifact(task_id, path, use_proxy=False):
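
For context, a standalone sketch of the non-proxy path that this hunk restores; the hardcoded queue URL template is taken from the hunk above, while the task ID and artifact path are invented.

```python
# Sketch of the restored, non-proxy code path of get_artifact_url:
# the hardcoded Heroku-cluster queue URL. Task ID and artifact name are invented.
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'


def get_artifact_url(task_id, path):
    return ARTIFACT_URL.format(task_id, path)


print(get_artifact_url('abc123DEFtest', 'public/build/target.zip'))
# https://queue.taskcluster.net/v1/task/abc123DEFtest/artifacts/public/build/target.zip
```
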
@ -110,12 +109,10 @@ def get_artifact_path(task, path):

def get_index_url(index_path, use_proxy=False, multiple=False):
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        index_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/index/v1/task{}/{}'
        INDEX_URL = 'http://taskcluster/index/v1/task{}/{}'
    else:
        index_tmpl = liburls.api(
            os.environ['TASKCLUSTER_ROOT_URL'], 'index', 'v1', 'task{}/{}')
    return index_tmpl.format('s' if multiple else '', index_path)
        INDEX_URL = 'https://index.taskcluster.net/v1/task{}/{}'
    return INDEX_URL.format('s' if multiple else '', index_path)


def find_task_id(index_path, use_proxy=False):
@ -165,12 +162,10 @@ def parse_time(timestamp):

def get_task_url(task_id, use_proxy=False):
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        task_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/queue/v1/task/{}'
        TASK_URL = 'http://taskcluster/queue/v1/task/{}'
    else:
        task_tmpl = liburls.api(
            os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1', 'task/{}')
    return task_tmpl.format(task_id)
        TASK_URL = 'https://queue.taskcluster.net/v1/task/{}'
    return TASK_URL.format(task_id)


def get_task_definition(task_id, use_proxy=False):
@ -210,19 +205,16 @@ def rerun_task(task_id):
def get_current_scopes():
    """Get the current scopes. This only makes sense in a task with the Taskcluster
    proxy enabled, where it returns the actual scopes accorded to the task."""
    # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
    resp = _do_request(os.environ['TASKCLUSTER_PROXY_URL'] + '/auth/v1/scopes/current')
    resp = _do_request('http://taskcluster/auth/v1/scopes/current')
    return resp.json().get("scopes", [])


def get_purge_cache_url(provisioner_id, worker_type, use_proxy=False):
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        url_tmpl = os.environ['TASKCLUSTER_PROXY_URL'] + '/purge-cache/v1/purge-cache/{}/{}'
        TASK_URL = 'http://taskcluster/purge-cache/v1/purge-cache/{}/{}'
    else:
        url_tmpl = liburls.api(
            os.environ['TASKCLUSTER_ROOT_URL'], 'purge-cache', 'v1', 'purge-cache/{}/{}')
    return url_tmpl.format(provisioner_id, worker_type)
        TASK_URL = 'https://purge-cache.taskcluster.net/v1/purge-cache/{}/{}'
    return TASK_URL.format(provisioner_id, worker_type)


def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False):
@ -235,33 +227,31 @@ def purge_cache(provisioner_id, worker_type, cache_name, use_proxy=False):
        _do_request(purge_cache_url, json={'cacheName': cache_name})


def get_taskcluster_artifact_prefix(task, task_id, postfix='', locale=None, force_private=False):
    if locale:
        postfix = '{}/{}'.format(locale, postfix)

    artifact_prefix = get_artifact_prefix(task)
    if artifact_prefix == 'public/build' and not force_private:
        tmpl = _PUBLIC_TC_ARTIFACT_LOCATION
    else:
        tmpl = _PRIVATE_TC_ARTIFACT_LOCATION

    return tmpl.format(
        task_id=task_id, postfix=postfix, artifact_prefix=artifact_prefix
    )


def send_email(address, subject, content, link, use_proxy=False):
    """Sends an email using the notify service"""
    logger.info('Sending email to {}.'.format(address))
    if use_proxy:
        # Until bug 1460015 is finished, use the old baseUrl style of proxy URL
        url = os.environ['TASKCLUSTER_PROXY_URL'] + '/notify/v1/email'
        url = 'http://taskcluster/notify/v1/email'
    else:
        url = liburls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'notify', 'v1', 'email')
        url = 'https://notify.taskcluster.net/v1/email'
    _do_request(url, json={
        'address': address,
        'subject': subject,
        'content': content,
        'link': link,
    })


def list_task_group(task_group_id):
    """Generate the tasks in a task group"""
    params = {}
    while True:
        url = liburls.api(os.environ['TASKCLUSTER_ROOT_URL'], 'queue', 'v1',
                          'task-group/{}/list'.format(task_group_id))
        resp = _do_request(url, params=params).json()
        for task in [t['status'] for t in resp['tasks']]:
            if task['state'] in ['running', 'pending', 'unscheduled']:
                yield task['taskId']
        if resp.get('continuationToken'):
            params = {'continuationToken': resp.get('continuationToken')}
        else:
            break
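
A rough, offline sketch of the `continuationToken` pagination loop that `list_task_group` uses above; `_do_request` is replaced by canned pages and the task IDs are invented.

```python
# Sketch of the continuationToken pagination loop, with the queue responses
# mocked so nothing is fetched. Task IDs and the group ID are invented.
pages = [
    {'tasks': [{'status': {'taskId': 'task-A', 'state': 'running'}},
               {'status': {'taskId': 'task-B', 'state': 'completed'}}],
     'continuationToken': 'page-2'},
    {'tasks': [{'status': {'taskId': 'task-C', 'state': 'pending'}}]},
]


def fake_list_task_group(task_group_id):
    params = {}
    page = 0
    while True:
        resp = pages[page]  # stands in for _do_request(url, params=params).json()
        for task in [t['status'] for t in resp['tasks']]:
            if task['state'] in ['running', 'pending', 'unscheduled']:
                yield task['taskId']
        if resp.get('continuationToken'):
            params = {'continuationToken': resp['continuationToken']}
            page += 1
        else:
            break


print(list(fake_list_task_group('some-group')))  # ['task-A', 'task-C']
```
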
1 third_party/python/requirements.in vendored
@ -13,6 +13,5 @@ redo==2.0.2
requests==2.9.1
six==1.10.0
taskcluster==4.0.1
taskcluster-urls==11.0.0
virtualenv==15.2.0
voluptuous==0.11.5
4 third_party/python/requirements.txt vendored
@ -100,10 +100,6 @@ six==1.10.0 \
slugid==1.0.7 \
    --hash=sha256:6dab3c7eef0bb423fb54cb7752e0f466ddd0ee495b78b763be60e8a27f69e779 \
    # via taskcluster
taskcluster-urls==11.0.0 \
    --hash=sha256:18dcaa9c2412d34ff6c78faca33f0dd8f2384e3f00a98d5832c62d6d664741f0 \
    --hash=sha256:2aceab7cf5b1948bc197f2e5e50c371aa48181ccd490b8bada00f1e3baf0c5cc \
    --hash=sha256:74bd2110b5daaebcec5e1d287bf137b61cb8cf6b2d8f5f2b74183e32bc4e7c87
taskcluster==4.0.1 \
    --hash=sha256:27256511044346ac71a495d3c636f2add95c102b9b09f90d6fb1ea3e9949d311 \
    --hash=sha256:99dd90bc1c566968868c8b07ede32f8e031cbccd52c7195a61e802679d461447 \
373 third_party/python/taskcluster-urls/LICENSE vendored
@ -1,373 +0,0 @@
@ -1,4 +0,0 @@
include LICENSE
global-exclude *.py[co]
include specification.yml
include package.json
253 third_party/python/taskcluster-urls/PKG-INFO vendored
@ -1,253 +0,0 @@
Metadata-Version: 2.1
Name: taskcluster-urls
Version: 11.0.0
Summary: Standardized url generator for taskcluster resources.
Home-page: https://github.com/taskcluster/taskcluster-lib-urls
Author: Brian Stack
Author-email: bstack@mozilla.com
License: MPL2
Description: # Taskcluster URL Building Library
Platform: UNKNOWN
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Description-Content-Type: text/markdown
236 third_party/python/taskcluster-urls/README.md vendored
@ -1,236 +0,0 @@
# Taskcluster URL Building Library

[![License](https://img.shields.io/badge/license-MPL%202.0-orange.svg)](http://mozilla.org/MPL/2.0)

A simple library to generate URLs for various Taskcluster resources across our various deployment methods.

This serves as both a simple shim for projects that use JavaScript but also is the reference implementation for
how we define these paths.

URLs are defined in the 'Taskcluster URL Format' document.

Changelog
---------
View the changelog on the [releases page](https://github.com/taskcluster/taskcluster-lib-urls/releases).

Requirements
------------

This is tested on and should run on any of Node.js `{8, 10}`.

JS Usage
--------
[![Node.js Build Status](https://travis-ci.org/taskcluster/taskcluster-lib-urls.svg?branch=master)](https://travis-ci.org/taskcluster/taskcluster-lib-urls)
[![npm](https://img.shields.io/npm/v/taskcluster-lib-urls.svg?maxAge=2592000)](https://www.npmjs.com/package/taskcluster-lib-urls)

This package exports several methods for generating URLs conditionally based on
a root URL, as well as a few helper classes for generating URLs for a pre-determined
root URL:

* `api(rootUrl, service, version, path)` -> `String`
* `apiReference(rootUrl, service, version)` -> `String`
* `docs(rootUrl, path)` -> `String`
* `exchangeReference(rootUrl, service, version)` -> `String`
* `schema(rootUrl, service, schema)` -> `String`
* `ui(rootUrl, path)` -> `String`
* `servicesManifest(rootUrl)` -> `String`
* `testRootUrl()` -> `String`
* `withRootUrl(rootUrl)` -> `Class` instance for above methods

When the `rootUrl` is `https://taskcluster.net`, the generated URLs will be to the Heroku cluster. Otherwise they will follow the
[spec defined in this project](https://github.com/taskcluster/taskcluster-lib-urls/tree/master/docs/urls-spec.md).

`testRootUrl()` is used to share a common fake `rootUrl` between various Taskcluster mocks in testing.
The URL does not resolve.

```js
// Specifying root URL every time:
const libUrls = require('taskcluster-lib-urls');

libUrls.api(rootUrl, 'auth', 'v1', 'foo/bar');
libUrls.schema(rootUrl, 'auth', 'v1/foo.yml'); // Note that schema names have versions in them
libUrls.apiReference(rootUrl, 'auth', 'v1');
libUrls.exchangeReference(rootUrl, 'auth', 'v1');
libUrls.ui(rootUrl, 'foo/bar');
libUrls.servicesManifest(rootUrl);
libUrls.docs(rootUrl, 'foo/bar');
```

```js
// Specifying root URL in advance:
const libUrls = require('taskcluster-lib-urls');

const urls = libUrls.withRoot(rootUrl);

urls.api('auth', 'v1', 'foo/bar');
urls.schema('auth', 'v1/foo.yml');
urls.apiReference('auth', 'v1');
urls.exchangeReference('auth', 'v1');
urls.ui('foo/bar');
urls.servicesManifest();
urls.docs('foo/bar');
```

If you would like, you can set this up via [taskcluster-lib-loader](https://github.com/taskcluster/taskcluster-lib-loader) as follows:

```js
{
  libUrlss: {
    require: ['cfg'],
    setup: ({cfg}) => withRootUrl(cfg.rootURl),
  },
}
```

Test with:

```
yarn install
yarn test
```


Go Usage
--------

[![GoDoc](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls?status.svg)](https://godoc.org/github.com/taskcluster/taskcluster-lib-urls)

The go package exports the following functions:

```go
func API(rootURL string, service string, version string, path string) string
func APIReference(rootURL string, service string, version string) string
func Docs(rootURL string, path string) string
func ExchangeReference(rootURL string, service string, version string) string
func Schema(rootURL string, service string, name string) string
func UI(rootURL string, path string) string
func ServicesManifest(rootURL string) string
```

Install with:

```
go install ./..
```

Test with:

```
go test -v ./...
```

Python Usage
------------

You can install the python client with `pip install taskcluster-urls`;

```python
import taskcluster_urls

taskcluster_urls.api(root_url, 'auth', 'v1', 'foo/bar')
taskcluster_urls.schema(root_url, 'auth', 'v1/foo.yml') # Note that schema names have versions in them
taskcluster_urls.api_reference(root_url, 'auth', 'v1')
taskcluster_urls.exchange_reference(root_url, 'auth', 'v1')
taskcluster_urls.ui(root_url, 'foo/bar')
taskcluster_urls.servicesManifest(root_url)
taskcluster_urls.docs(root_url, 'foo/bar')

And for testing,
```python
taskcluster_urls.test_root_url()
```

Test with:

```
tox
```

Java Usage
----------

[![JavaDoc](https://img.shields.io/badge/javadoc-reference-blue.svg)](http://taskcluster.github.io/taskcluster-lib-urls/apidocs)

In order to use this library from your maven project, simply include it as a project dependency:

```
<project>
  ...
  <dependencies>
    ...
    <dependency>
      <groupId>org.mozilla.taskcluster</groupId>
      <artifactId>taskcluster-lib-urls</artifactId>
      <version>1.0.0</version>
    </dependency>
  </dependencies>
</project>
```

The taskcluster-lib-urls artifacts are now available from the [maven central repository](http://central.sonatype.org/):

* [Search Results](http://search.maven.org/#search|gav|1|g%3A%22org.mozilla.taskcluster%22%20AND%20a%3A%22taskcluster-lib-urls%22)
* [Directory Listing](https://repo1.maven.org/maven2/org/mozilla/taskcluster/taskcluster-lib-urls/)

To use the library, do as follows:

```java
import org.mozilla.taskcluster.urls.*;

...

URLProvider urlProvider = URLs.provider("https://mytaskcluster.acme.org");

String fooBarAPI        = urlProvider.api("auth", "v1", "foo/bar");
String fooSchema        = urlProvider.schema("auth", "v1/foo.yml"); // Note that schema names have versions in them
String authAPIRef       = urlProvider.apiReference("auth", "v1");
String authExchangesRef = urlProvider.exchangeReference("auth", "v1");
String uiFooBar         = urlProvider.ui("foo/bar");
String servicesManifest = urlProvider.servicesManifest();
String docsFooBar       = urlProvider.docs("foo/bar");

...
```

Install with:

```
mvn install
```

Test with:

```
mvn test
```


Releasing
---------

New releases should be tested on Travis and Taskcluster to allow for all supported versions of various languages to be tested. Once satisfied that it works, new versions should be created with
`npm version` rather than by manually editing `package.json` and tags should be pushed to Github.

Make the Node release first, as Python's version depends on its `package.json`. This follows the typical tag-and-push-to-publish approach:

```sh
$ npm version minor # or patch, or major
$ git push upstream
```

Once that's done, build the Python sdists (only possible by the [maintainers on pypi](https://pypi.org/project/taskcluster-urls/#files)):

```sh
rm -rf dist/*
python setup.py sdist bdist_wheel
python3 setup.py bdist_wheel
pip install twine
twine upload dist/*
```

Make sure to update [the changelog](https://github.com/taskcluster/taskcluster-lib-urls/releases)!

License
-------

[Mozilla Public License Version 2.0](https://github.com/taskcluster/taskcluster-lib-urls/blob/master/LICENSE)
25 third_party/python/taskcluster-urls/package.json vendored
@ -1,25 +0,0 @@
{
  "name": "taskcluster-lib-urls",
  "version": "11.0.0",
  "author": "Brian Stack <bstack@mozilla.com>",
  "description": "Build urls for taskcluster resources.",
  "license": "MPL-2.0",
  "scripts": {
    "lint": "eslint src/*.js test/*.js",
    "pretest": "yarn lint",
    "test": "mocha test/*_test.js"
  },
  "files": [
    "src"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/taskcluster/taskcluster-lib-urls.git"
  },
  "main": "./src/index.js",
  "devDependencies": {
    "eslint-config-taskcluster": "^3.1.0",
    "js-yaml": "^3.11.0",
    "mocha": "^5.1.1"
  }
}
@ -1,7 +0,0 @@
[tools:pytest]
flake8-max-line-length = 120

[egg_info]
tag_build =
tag_date = 0
28 third_party/python/taskcluster-urls/setup.py vendored
@ -1,28 +0,0 @@
import json
import os
from setuptools import setup

package_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'package.json')
with open(package_json) as f:
    version = json.load(f)['version']

setup(
    name='taskcluster-urls',
    description='Standardized url generator for taskcluster resources.',
    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
    long_description_content_type='text/markdown',
    url='https://github.com/taskcluster/taskcluster-lib-urls',
    version=version,
    packages=['taskcluster_urls'],
    author='Brian Stack',
    author_email='bstack@mozilla.com',
    license='MPL2',
    classifiers=[
        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
)
@ -1,66 +0,0 @@
OLD_ROOT_URL = 'https://taskcluster.net'

def api(root_url, service, version, path):
    """Generate URL for path in a Taskcluster service."""
    root_url = root_url.rstrip('/')
    path = path.lstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://{}.taskcluster.net/{}/{}'.format(service, version, path)
    else:
        return '{}/api/{}/{}/{}'.format(root_url, service, version, path)

def api_reference(root_url, service, version):
    """Generate URL for a Taskcluster api reference."""
    root_url = root_url.rstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://references.taskcluster.net/{}/{}/api.json'.format(service, version)
    else:
        return '{}/references/{}/{}/api.json'.format(root_url, service, version)

def docs(root_url, path):
    """Generate URL for path in the Taskcluster docs."""
    root_url = root_url.rstrip('/')
    path = path.lstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://docs.taskcluster.net/{}'.format(path)
    else:
        return '{}/docs/{}'.format(root_url, path)

def exchange_reference(root_url, service, version):
    """Generate URL for a Taskcluster exchange reference."""
    root_url = root_url.rstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://references.taskcluster.net/{}/{}/exchanges.json'.format(service, version)
    else:
        return '{}/references/{}/{}/exchanges.json'.format(root_url, service, version)

def schema(root_url, service, name):
    """Generate URL for a schema in a Taskcluster service."""
    root_url = root_url.rstrip('/')
    name = name.lstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://schemas.taskcluster.net/{}/{}'.format(service, name)
    else:
        return '{}/schemas/{}/{}'.format(root_url, service, name)

def ui(root_url, path):
    """Generate URL for a path in the Taskcluster ui."""
    root_url = root_url.rstrip('/')
    path = path.lstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://tools.taskcluster.net/{}'.format(path)
    else:
        return '{}/{}'.format(root_url, path)

def services_manifest(root_url):
    """Returns a URL for the service manifest of a taskcluster deployment."""
    root_url = root_url.rstrip('/')
    if root_url == OLD_ROOT_URL:
        return 'https://references.taskcluster.net/manifest.json'
    else:
        return '{}/references/manifest.json'.format(root_url)

def test_root_url():
    """Returns a standardized "testing" rootUrl that does not resolve but
    is easily recognizable in test failures."""
    return 'https://tc-tests.example.com'
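
To connect this module to the hardcoded URLs restored elsewhere in the backout, here is what `api()` as defined above returns for the legacy root URL versus a hypothetical self-hosted deployment; the task ID and the example root URL are made up.

```python
# Assumes the api() helper shown above is importable, e.g. from the vendored
# taskcluster_urls package; the task ID and second root URL are invented.
from taskcluster_urls import api

print(api('https://taskcluster.net', 'queue', 'v1',
          'task/abc123/artifacts/public/logs/live.log'))
# -> https://queue.taskcluster.net/v1/task/abc123/artifacts/public/logs/live.log
#    (the same form as the hardcoded queue.taskcluster.net templates this backout restores)

print(api('https://tc.example.com', 'queue', 'v1',
          'task/abc123/artifacts/public/logs/live.log'))
# -> https://tc.example.com/api/queue/v1/task/abc123/artifacts/public/logs/live.log
```
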
@ -55,9 +55,6 @@ def invalidate(cache, root):


def generate_tasks(params, full, root):
    # Ensure that TASKCLUSTER_ROOT_URL is set
    taskgraph.set_root_url_env()

    params = params or "project=mozilla-central"

    # Try to delete the old taskgraph cache directory.