Bug 1492664 - set TASKCLUSTER_ROOT_URL and TASKCLUSTER_PROXY_URL; r=tomprince,glandium

Eventually, workers will provide these variables directly
(https://bugzilla.mozilla.org/show_bug.cgi?id=1460015).  But for now, this
ensures that TASKCLUSTER_ROOT_URL is set everywhere in production, and
TASKCLUSTER_PROXY_URL is set wherever the proxy is active.

The taskgraph Taskcluster utils module gets a `get_root_url()` that gets the
root URL for the current run, either from an environment variable in production
or, on the command line, defaulting to https://taskcluster.net for user
convenience.  When the production instance's URL changes, we can simply change
that default.

Other changes to use this function are reserved for later commits.

This changes the docker build process to propagate TASKCLUSTER_ROOT_URL into the
docker images where necessary (using %ARG), specifically to create URLs for
debian repo paths.

--HG--
extra : rebase_source : 4f50e9d066da62a1887baabd8603844c85a32ee6
extra : source : 5ea6f03f845e49d503f5d0283557f54561c41654
This commit is contained in:
Dustin J. Mitchell 2018-09-25 20:18:19 +00:00
parent c2ecf453ff
commit 1516eb0435
13 changed files with 84 additions and 9 deletions

View File

@ -146,6 +146,9 @@ tasks:
GECKO_COMMIT_MSG: {$if: 'tasks_for != "action"', then: '${push.comment}'}
HG_STORE_PATH: /builds/worker/checkouts/hg-store
TASKCLUSTER_CACHES: /builds/worker/checkouts
# someday, these will be provided by the worker - Bug 1492664
TASKCLUSTER_ROOT_URL: https://taskcluster.net
TASKCLUSTER_PROXY_URL: http://taskcluster
- $if: 'tasks_for == "action"'
then:
ACTION_TASK_GROUP_ID: '${action.taskGroupId}' # taskGroupId of the target task

View File

@ -218,6 +218,7 @@ Task group: [{task_group_id}](https://tools.taskcluster.net/groups/{task_group_i
notify_options = {}
if 'TASKCLUSTER_PROXY_URL' in os.environ:
# Until bug 1460015 is finished, use the old baseUrl style of proxy URL
base_url = os.environ['TASKCLUSTER_PROXY_URL'].rstrip('/')
notify_options['baseUrl'] = '{}/notify/v1'.format(base_url)
notify = Notify(notify_options)

View File

@ -49,8 +49,9 @@ RUN apt-get update && \
COPY setup_packages.sh /usr/local/sbin/
COPY cloud-mirror-workaround.sh /usr/local/sbin/
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
echo 'dir::bin::methods::https "/usr/local/sbin/cloud-mirror-workaround.sh";' > /etc/apt/apt.conf.d/99cloud-mirror-workaround && \
apt-get update && \
apt-get install \

View File

@ -1,5 +1,17 @@
#!/bin/sh
TASKCLUSTER_ROOT_URL=$1
shift
# duplicate the functionality of taskcluster-lib-urls, but in bash..
if [ "$TASKCLUSTER_ROOT_URL" = "https://taskcluster.net" ]; then
queue_base='https://queue.taskcluster.net/v1'
else
queue_base="$TASKCLUSTER_ROOT_URL/api/queue/v1"
fi
for task in "$@"; do
echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
echo "adding package source $queue_base/task/$task/artifacts/public/build/"
echo "deb [trusted=yes] $queue_base/task/$task/artifacts/public/build/ debian/" > "/etc/apt/sources.list.d/99$task.list"
done

View File

@ -8,8 +8,9 @@ VOLUME /builds/worker/tooltool-cache
ENV XZ_OPT=-T0
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
# %ARG ARCH
RUN dpkg --add-architecture $ARCH

View File

@ -6,7 +6,8 @@ VOLUME /builds/worker/checkouts
VOLUME /builds/worker/workspace
VOLUME /builds/worker/tooltool-cache
# %ARG TASKCLUSTER_ROOT_URL
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES && \
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES && \
apt-get update && \
apt-get install cmake

View File

@ -9,7 +9,8 @@ VOLUME /builds/worker/tooltool-cache
ENV XZ_OPT=-T0
# %ARG DOCKER_IMAGE_PACKAGES
RUN /usr/local/sbin/setup_packages.sh $DOCKER_IMAGE_PACKAGES
# %ARG TASKCLUSTER_ROOT_URL
RUN /usr/local/sbin/setup_packages.sh $TASKCLUSTER_ROOT_URL $DOCKER_IMAGE_PACKAGES
RUN apt-get update && \
apt-get install \

View File

@ -10,6 +10,8 @@ import stat
import tarfile
import tempfile
import unittest
import mock
import taskcluster_urls as liburls
from taskgraph.util import docker
from mozunit import main, MockedOpen
@ -18,6 +20,7 @@ from mozunit import main, MockedOpen
MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
@mock.patch.dict('os.environ', {'TASKCLUSTER_ROOT_URL': liburls.test_root_url()})
class TestDocker(unittest.TestCase):
def test_generate_context_hash(self):

View File

@ -15,6 +15,7 @@ from .. import GECKO
from taskgraph.util.docker import (
generate_context_hash,
)
from taskgraph.util.taskcluster import get_root_url
from taskgraph.util.schema import (
Schema,
)
@ -111,6 +112,8 @@ def fill_template(config, tasks):
if parent:
args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(parent, context_hashes[parent])
args['TASKCLUSTER_ROOT_URL'] = get_root_url()
if not taskgraph.fast:
context_path = os.path.join('taskcluster', 'docker', definition)
context_hash = generate_context_hash(

View File

@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals
import os
import re
import taskcluster_urls
from taskgraph.util.schema import Schema
from voluptuous import Any, Optional, Required
@ -17,6 +18,7 @@ from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import add_artifacts
from taskgraph.util.hash import hash_path
from taskgraph.util.taskcluster import get_root_url
from taskgraph import GECKO
import taskgraph
@ -152,6 +154,8 @@ def docker_worker_debian_package(config, job, taskdesc):
dist=run['dist'],
)
queue_url = taskcluster_urls.api(get_root_url(), 'queue', 'v1', '')
# We can't depend on docker images (since docker images depend on packages),
# so we inline the whole script here.
worker['command'] = [
@ -171,8 +175,7 @@ def docker_worker_debian_package(config, job, taskdesc):
# Add sources for packages coming from other package tasks.
'apt-get install -yyq apt-transport-https ca-certificates && '
'for task in $PACKAGES; do '
' echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
'/$task/artifacts/public/build/ debian/" '
' echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" '
'>> /etc/apt/sources.list; '
'done && '
# Install the base utilities required to build debian packages.
@ -198,6 +201,7 @@ def docker_worker_debian_package(config, job, taskdesc):
'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
.format(
queue_url=queue_url,
package=package,
snapshot=run['snapshot'],
dist=run['dist'],

View File

@ -21,6 +21,7 @@ from taskgraph.util.attributes import TRUNK_PROJECTS
from taskgraph.util.hash import hash_path
from taskgraph.util.treeherder import split_symbol
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.taskcluster import get_root_url
from taskgraph.util.schema import (
validate_schema,
Schema,
@ -499,6 +500,11 @@ def build_docker_worker_payload(config, task, task_def):
else:
raise Exception("unknown docker image type")
# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
worker['env']['TASKCLUSTER_ROOT_URL'] = get_root_url()
features = {}
if worker.get('relengapi-proxy'):
@ -531,6 +537,11 @@ def build_docker_worker_payload(config, task, task_def):
else:
worker['env']['SCCACHE_DISABLE'] = '1'
# this will soon be provided directly by the worker:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
if features.get('taskclusterProxy'):
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
capabilities = {}
for lo in 'audio', 'video':
@ -763,6 +774,11 @@ def build_generic_worker_payload(config, task, task_def):
env = worker.get('env', {})
# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
env['TASKCLUSTER_ROOT_URL'] = get_root_url()
if task.get('needs-sccache'):
env['USE_SCCACHE'] = '1'
# Disable sccache idle shutdown.
@ -817,7 +833,9 @@ def build_generic_worker_payload(config, task, task_def):
if worker.get('taskcluster-proxy'):
features['taskclusterProxy'] = True
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster/'
# this will soon be provided directly by the worker:
# https://bugzilla.mozilla.org/show_bug.cgi?id=1460015
worker['env']['TASKCLUSTER_PROXY_URL'] = 'http://taskcluster'
if worker.get('run-as-administrator', False):
features['runAsAdministrator'] = True
@ -1315,6 +1333,11 @@ def build_always_optimized_payload(config, task, task_def):
})
def build_macosx_engine_payload(config, task, task_def):
worker = task['worker']
# propagate our TASKCLUSTER_ROOT_URL to the task; note that this will soon
# be provided directly by the worker, making this redundant
worker.setdefault('env', {})['TASKCLUSTER_ROOT_URL'] = get_root_url()
artifacts = map(lambda artifact: {
'name': artifact['name'],
'path': artifact['path'],

View File

@ -208,6 +208,7 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
object."""
archive_files = {}
replace = []
content = []
context_dir = os.path.join(topsrcdir, context_dir)
@ -219,7 +220,6 @@ def stream_context_tar(topsrcdir, context_dir, out_file, prefix, args=None):
archive_files[archive_path] = source_path
# Parse Dockerfile for special syntax of extra files to include.
content = []
with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
for line in fh:
if line.startswith('# %ARG'):

View File

@ -6,6 +6,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import os
import datetime
import functools
import yaml
@ -27,6 +28,27 @@ logger = logging.getLogger(__name__)
# this is set to true for `mach taskgraph action-callback --test`
testing = False
# Default rootUrl to use if none is given in the environment; this should point
# to the production Taskcluster deployment used for CI.
PRODUCTION_TASKCLUSTER_ROOT_URL = 'https://taskcluster.net'


@memoize
def get_root_url():
    """Get the current TASKCLUSTER_ROOT_URL.

    When running in a task, this must come from $TASKCLUSTER_ROOT_URL; when
    run on the command line, we apply a default that points to the production
    deployment of Taskcluster.

    Raises RuntimeError if running in a task (``$TASK_ID`` is set) without
    ``$TASKCLUSTER_ROOT_URL``.
    """
    if 'TASKCLUSTER_ROOT_URL' not in os.environ:
        # Inside a task the root URL must be provided explicitly; silently
        # guessing a deployment from within a task would be unsafe.
        if 'TASK_ID' in os.environ:
            raise RuntimeError('$TASKCLUSTER_ROOT_URL must be set when running in a task')
        else:
            logger.debug('Using default TASKCLUSTER_ROOT_URL (Firefox CI production)')
            return PRODUCTION_TASKCLUSTER_ROOT_URL
    logger.debug('Running in Taskcluster instance {}{}'.format(
        os.environ['TASKCLUSTER_ROOT_URL'],
        ' with taskcluster-proxy' if 'TASKCLUSTER_PROXY_URL' in os.environ else ''))
    return os.environ['TASKCLUSTER_ROOT_URL']
@memoize
def get_session():