mirror of
https://github.com/mozilla/gecko-dev.git
synced 2025-02-28 21:28:55 +00:00
Bug 1274611: implement docker image builds as a distinct kind; r=wcosta
MozReview-Commit-ID: 81Ad9LcBdx6 --HG-- extra : rebase_source : 3a2c06040f7af8b757eec2991360a7a4d3cf61cb
This commit is contained in:
parent
954a9c8264
commit
d788a52aeb
56
taskcluster/ci/docker-image/image.yml
Normal file
56
taskcluster/ci/docker-image/image.yml
Normal file
@ -0,0 +1,56 @@
|
||||
---
|
||||
task:
|
||||
created: '{{now}}'
|
||||
deadline: '{{#from_now}}24 hours{{/from_now}}'
|
||||
metadata:
|
||||
name: 'Docker Image Build: {{image_name}}'
|
||||
description: 'Build the docker image {{image_name}} for use by dependent tasks'
|
||||
source: '{{source}}'
|
||||
owner: mozilla-taskcluster-maintenance@mozilla.com
|
||||
tags:
|
||||
createdForUser: {{owner}}
|
||||
|
||||
workerType: taskcluster-images
|
||||
provisionerId: aws-provisioner-v1
|
||||
|
||||
routes:
|
||||
- index.docker.images.v1.{{project}}.{{image_name}}.latest
|
||||
- index.docker.images.v1.{{project}}.{{image_name}}.pushdate.{{year}}.{{month}}-{{day}}-{{pushtime}}
|
||||
- index.docker.images.v1.{{project}}.{{image_name}}.hash.{{context_hash}}
|
||||
- tc-treeherder.{{project}}.{{revision_hash}}
|
||||
- tc-treeherder-stage.{{project}}.{{revision_hash}}
|
||||
|
||||
payload:
|
||||
env:
|
||||
HASH: '{{context_hash}}'
|
||||
PROJECT: '{{project}}'
|
||||
CONTEXT_URL: '{{context_url}}'
|
||||
CONTEXT_PATH: '{{context_path}}'
|
||||
BASE_REPOSITORY: '{{base_repository}}'
|
||||
HEAD_REPOSITORY: '{{head_repository}}'
|
||||
HEAD_REV: '{{head_rev}}'
|
||||
HEAD_REF: '{{head_ref}}'
|
||||
features:
|
||||
dind: true
|
||||
image: '{{#docker_image}}image_builder{{/docker_image}}'
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- /home/worker/bin/build_image.sh
|
||||
maxRunTime: 3600
|
||||
artifacts:
|
||||
'{{artifact_path}}':
|
||||
type: 'file'
|
||||
path: '/artifacts/image.tar'
|
||||
expires: '{{#from_now}}1 year{{/from_now}}'
|
||||
extra:
|
||||
treeherderEnv:
|
||||
- staging
|
||||
- production
|
||||
treeherder:
|
||||
revision: {{head_rev}}
|
||||
revision_hash: {{revision_hash}}
|
||||
build:
|
||||
platform: 'taskcluster-images'
|
||||
symbol: 'I'
|
||||
|
19
taskcluster/ci/docker-image/kind.yml
Normal file
19
taskcluster/ci/docker-image/kind.yml
Normal file
@ -0,0 +1,19 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
implementation: 'taskgraph.kind.docker_image:DockerImageKind'
|
||||
images_path: '../../../testing/docker'
|
||||
|
||||
# make a task for each docker-image we might want. For the moment, since we
|
||||
# write artifacts for each, these are whitelisted, but ideally that will change
|
||||
# (to use subdirectory clones of the proper directory), at which point we can
|
||||
# generate tasks for every docker image in the directory, secure in the
|
||||
# knowledge that unnecessary images will be omitted from the target task graph
|
||||
images:
|
||||
- desktop-test
|
||||
- desktop-build
|
||||
- builder
|
||||
- tester
|
||||
- lint
|
||||
- android-gradle-build
|
@ -93,3 +93,7 @@ post_build
|
||||
(deprecated) The name of the post-build activity. This is valid only for the
|
||||
``legacy`` kind.
|
||||
|
||||
image_name
|
||||
==========
|
||||
|
||||
For the ``docker_image`` kind, this attribute contains the docker image name.
|
||||
|
162
taskcluster/taskgraph/kind/docker_image.py
Normal file
162
taskcluster/taskgraph/kind/docker_image.py
Normal file
@ -0,0 +1,162 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import logging
|
||||
import json
|
||||
import os
|
||||
import urllib2
|
||||
import hashlib
|
||||
import tarfile
|
||||
import time
|
||||
|
||||
from . import base
|
||||
from ..types import Task
|
||||
from taskgraph.util import docker_image
|
||||
import taskcluster_graph.transform.routes as routes_transform
|
||||
import taskcluster_graph.transform.treeherder as treeherder_transform
|
||||
from taskcluster_graph.templates import Templates
|
||||
from taskcluster_graph.from_now import (
|
||||
json_time_from_now,
|
||||
current_json_time,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..'))
|
||||
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
|
||||
INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
|
||||
|
||||
|
||||
class DockerImageKind(base.Kind):
    """Kind implementation that emits one task per in-tree docker image.

    Each task builds the image named in the kind config and publishes it as
    an ``image.tar`` artifact; ``optimize_task`` reuses an already-indexed
    build with the same context hash when its artifact still exists.
    """

    def load_tasks(self, params):
        """Create a Task for each image listed in the kind config.

        ``params`` supplies repository/push metadata (project, head_rev,
        owner, level, revision_hash, ...) used to fill the image.yml template.
        Returns a list of Task objects.
        """
        # TODO: make this match the pushdate (get it from a parameter rather than vcs)
        pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime())

        parameters = {
            'pushdate': pushdate,
            # pushdate is YYYYMMDDHHMMSS; slice out the components used in routes
            'pushtime': pushdate[8:],
            'year': pushdate[0:4],
            'month': pushdate[4:6],
            'day': pushdate[6:8],
            'project': params['project'],
            'docker_image': docker_image,
            'base_repository': params['base_repository'] or params['head_repository'],
            'head_repository': params['head_repository'],
            'head_ref': params['head_ref'] or params['head_rev'],
            'head_rev': params['head_rev'],
            'owner': params['owner'],
            'level': params['level'],
            'from_now': json_time_from_now,
            'now': current_json_time(),
            'revision_hash': params['revision_hash'],
            'source': '{repo}file/{rev}/testing/taskcluster/tasks/image.yml'
                      .format(repo=params['head_repository'], rev=params['head_rev']),
        }

        tasks = []
        templates = Templates(self.path)
        for image_name in self.config['images']:
            # NOTE(review): this path is relative; create_context_tar appears to
            # assume the current working directory is the gecko root -- confirm
            context_path = os.path.join('testing', 'docker', image_name)
            context_hash = self.generate_context_hash(context_path)

            image_parameters = dict(parameters)
            image_parameters['context_hash'] = context_hash
            image_parameters['context_path'] = context_path
            image_parameters['artifact_path'] = 'public/image.tar'
            image_parameters['image_name'] = image_name

            image_artifact_path = "public/decision_task/image_contexts/{}/context.tar.gz".format(image_name)
            if os.environ.get('TASK_ID'):
                # Running within a decision task: write the context tarball
                # where it will be uploaded as a decision-task artifact.
                destination = os.path.join(
                    os.environ['HOME'],
                    "artifacts/decision_task/image_contexts/{}/context.tar.gz".format(image_name))
                image_parameters['context_url'] = ARTIFACT_URL.format(os.environ['TASK_ID'], image_artifact_path)
                self.create_context_tar(context_path, destination, image_name)
            else:
                # skip context generation since this isn't a decision task
                # TODO: generate context tarballs using subdirectory clones in
                # the image-building task so we don't have to worry about this.
                image_parameters['context_url'] = 'file:///tmp/' + image_artifact_path

            image_task = templates.load('image.yml', image_parameters)

            attributes = {
                'kind': self.name,
                'image_name': image_name,
            }

            # As an optimization, if the context hash exists for mozilla-central, that image
            # task ID will be used. The reasoning behind this is that eventually everything ends
            # up on mozilla-central at some point if most tasks use this as a common image
            # for a given context hash, a worker within Taskcluster does not need to contain
            # the same image per branch.
            index_paths = ['docker.images.v1.{}.{}.hash.{}'.format(project, image_name, context_hash)
                           for project in ['mozilla-central', params['project']]]

            tasks.append(Task(self, 'build-docker-image-' + image_name,
                              task=image_task['task'], attributes=attributes,
                              index_paths=index_paths))

        return tasks

    def get_task_dependencies(self, task, taskgraph):
        """Image-build tasks depend on nothing."""
        return []

    def optimize_task(self, task, taskgraph):
        """Return (True, task_id) when an existing image build can be reused.

        Each candidate index namespace is tried in turn; a candidate is
        accepted only if a HEAD request shows its image.tar artifact still
        exists.  Returns (False, None) when no candidate qualifies.
        """
        for index_path in task.extra['index_paths']:
            try:
                url = INDEX_URL.format(index_path)
                existing_task = json.load(urllib2.urlopen(url))
                # Only return the task ID if the artifact exists for the indexed
                # task. Otherwise, continue on looking at each of the branches. Method
                # continues trying other branches in case mozilla-central has an expired
                # artifact, but 'project' might not. Only return no task ID if all
                # branches have been tried
                request = urllib2.Request(ARTIFACT_URL.format(existing_task['taskId'], 'public/image.tar'))
                request.get_method = lambda: 'HEAD'
                urllib2.urlopen(request)

                # HEAD success on the artifact is enough
                return True, existing_task['taskId']
            except urllib2.URLError:
                # URLError is the parent of HTTPError, so this also covers
                # connection-level failures (DNS, refused connection) which
                # should fall through to the next candidate rather than
                # abort graph generation.
                pass

        return False, None

    def create_context_tar(self, context_dir, destination, image_name):
        """Create a gzipped tar of context_dir at destination, with members
        rooted under image_name.  Parent directories are created as needed."""
        destination = os.path.abspath(destination)
        if not os.path.exists(os.path.dirname(destination)):
            os.makedirs(os.path.dirname(destination))

        with tarfile.open(destination, 'w:gz') as tar:
            tar.add(context_dir, arcname=image_name)

    def generate_context_hash(self, image_path):
        '''Generates a sha256 hash for context directory used to build an image.

        Contents of the directory are sorted alphabetically, contents of each file is hashed,
        and then a hash is created for both the file hashes as well as their paths.

        This ensures that hashes are consistent and also change based on if file locations
        within the context directory change.
        '''
        context_hash = hashlib.sha256()
        files = []

        for dirpath, dirnames, filenames in os.walk(os.path.join(GECKO, image_path)):
            for filename in filenames:
                files.append(os.path.join(dirpath, filename))

        for filename in sorted(files):
            # Strip the GECKO prefix only when it actually is a prefix;
            # str.replace() would also clobber an occurrence mid-path.
            if filename.startswith(GECKO):
                relative_filename = filename[len(GECKO):]
            else:
                relative_filename = filename
            with open(filename, 'rb') as f:
                file_hash = hashlib.sha256()
                data = f.read()
                file_hash.update(data)
                context_hash.update(file_hash.hexdigest() + '\t' + relative_filename + '\n')

        return context_hash.hexdigest()
|
@ -28,17 +28,13 @@ from taskcluster_graph.mach_util import (
|
||||
import taskcluster_graph.transform.routes as routes_transform
|
||||
import taskcluster_graph.transform.treeherder as treeherder_transform
|
||||
from taskcluster_graph.commit_parser import parse_commit
|
||||
from taskcluster_graph.image_builder import (
|
||||
docker_image,
|
||||
normalize_image_details,
|
||||
task_id_for_image
|
||||
)
|
||||
from taskcluster_graph.from_now import (
|
||||
json_time_from_now,
|
||||
current_json_time,
|
||||
)
|
||||
from taskcluster_graph.templates import Templates
|
||||
import taskcluster_graph.build_task
|
||||
from taskgraph.util import docker_image
|
||||
|
||||
# TASKID_PLACEHOLDER is the "internal" form of a taskid; it is substituted with
|
||||
# actual taskIds at the very last minute, in get_task_definition
|
||||
@ -76,6 +72,8 @@ def set_expiration(task, timestamp):
|
||||
for artifact in artifacts.values():
|
||||
artifact['expires'] = timestamp
|
||||
|
||||
|
||||
|
||||
class LegacyKind(base.Kind):
|
||||
"""
|
||||
This kind generates a full task graph from the old YAML files in
|
||||
@ -121,13 +119,11 @@ class LegacyKind(base.Kind):
|
||||
changed_files |= set(c['files'])
|
||||
|
||||
# Template parameters used when expanding the graph
|
||||
seen_images = {}
|
||||
parameters = dict(gaia_info().items() + {
|
||||
'index': 'index',
|
||||
'project': project,
|
||||
'pushlog_id': params.get('pushlog_id', 0),
|
||||
'docker_image': docker_image,
|
||||
'task_id_for_image': partial(task_id_for_image, seen_images, project),
|
||||
'base_repository': params['base_repository'] or
|
||||
params['head_repository'],
|
||||
'head_repository': params['head_repository'],
|
||||
@ -231,11 +227,6 @@ class LegacyKind(base.Kind):
|
||||
build_parameters['build_type'] = task_extra['build_type']
|
||||
build_parameters['build_product'] = task_extra['build_product']
|
||||
|
||||
normalize_image_details(graph,
|
||||
build_task,
|
||||
seen_images,
|
||||
build_parameters,
|
||||
os.environ.get('TASK_ID', None))
|
||||
set_interactive_task(build_task, interactive)
|
||||
|
||||
# try builds don't use cache
|
||||
@ -319,11 +310,6 @@ class LegacyKind(base.Kind):
|
||||
mklabel(),
|
||||
templates,
|
||||
build_treeherder_config)
|
||||
normalize_image_details(graph,
|
||||
post_task,
|
||||
seen_images,
|
||||
build_parameters,
|
||||
os.environ.get('TASK_ID', None))
|
||||
set_interactive_task(post_task, interactive)
|
||||
treeherder_transform.add_treeherder_revision_info(post_task['task'],
|
||||
params['head_rev'],
|
||||
@ -373,11 +359,6 @@ class LegacyKind(base.Kind):
|
||||
mklabel(),
|
||||
templates,
|
||||
build_treeherder_config)
|
||||
normalize_image_details(graph,
|
||||
test_task,
|
||||
seen_images,
|
||||
build_parameters,
|
||||
os.environ.get('TASK_ID', None))
|
||||
set_interactive_task(test_task, interactive)
|
||||
|
||||
if params['revision_hash']:
|
||||
@ -433,7 +414,13 @@ class LegacyKind(base.Kind):
|
||||
def get_task_dependencies(self, task, taskgraph):
|
||||
# fetch dependency information from the cached graph
|
||||
taskdict = self.tasks_by_label[task.label]
|
||||
return [(label, label) for label in taskdict.get('requires', [])]
|
||||
deps = [(label, label) for label in taskdict.get('requires', [])]
|
||||
|
||||
# add a dependency on an image task, if needed
|
||||
if 'docker-image' in taskdict:
|
||||
deps.append(('build-docker-image-{docker-image}'.format(**taskdict), 'docker-image'))
|
||||
|
||||
return deps
|
||||
|
||||
def optimize_task(self, task, taskgraph):
|
||||
# no optimization for the moment
|
||||
|
54
taskcluster/taskgraph/test/test_kind_docker_image.py
Normal file
54
taskcluster/taskgraph/test/test_kind_docker_image.py
Normal file
@ -0,0 +1,54 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import unittest
|
||||
import tempfile
|
||||
import shutil
|
||||
import os
|
||||
|
||||
from ..kind import docker_image
|
||||
from ..types import Task
|
||||
from mozunit import main, MockedOpen
|
||||
|
||||
|
||||
class TestDockerImageKind(unittest.TestCase):
    """Unit tests for the docker_image kind implementation."""

    def setUp(self):
        self.kind = docker_image.DockerImageKind(
            os.path.join(docker_image.GECKO, 'taskcluster', 'ci', 'docker-image'),
            {})

    def test_get_task_dependencies(self):
        # this one's easy!
        self.assertEqual(self.kind.get_task_dependencies(None, None), [])

    # TODO: optimize_task

    def test_create_context_tar(self):
        image_dir = os.path.join(docker_image.GECKO, 'testing', 'docker', 'image_builder')
        tarball = tempfile.mkstemp()[1]
        try:
            self.kind.create_context_tar(image_dir, tarball, 'image_builder')
            # assertTrue replaces the deprecated failUnless alias
            self.assertTrue(os.path.exists(tarball))
        finally:
            # clean up even when the assertion (or the build) fails
            os.unlink(tarball)

    def test_generate_context_hash(self):
        tmpdir = tempfile.mkdtemp()
        old_GECKO = docker_image.GECKO
        docker_image.GECKO = tmpdir
        try:
            os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
            with open(os.path.join(tmpdir, 'docker', 'my-image', 'Dockerfile'), "w") as f:
                f.write("FROM node\nADD a-file\n")
            with open(os.path.join(tmpdir, 'docker', 'my-image', 'a-file'), "w") as f:
                f.write("data\n")
            self.assertEqual(self.kind.generate_context_hash('docker/my-image'),
                             '781143fcc6cc72c9024b058665265cb6bae3fb8031cad7227dd169ffbfced434')
        finally:
            docker_image.GECKO = old_GECKO
            shutil.rmtree(tmpdir)


if __name__ == '__main__':
    main()
|
27
taskcluster/taskgraph/test/test_util.py
Normal file
27
taskcluster/taskgraph/test/test_util.py
Normal file
@ -0,0 +1,27 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import unittest
|
||||
|
||||
from ..util import docker_image, DOCKER_ROOT
|
||||
from mozunit import main, MockedOpen
|
||||
|
||||
|
||||
class TestDockerImage(unittest.TestCase):
    """Tests for taskgraph.util.docker_image name resolution."""

    def test_docker_image_explicit_registry(self):
        # A per-image REGISTRY file overrides the default registry.
        files = {
            "{}/myimage/REGISTRY".format(DOCKER_ROOT): "cool-images",
            "{}/myimage/VERSION".format(DOCKER_ROOT): "1.2.3",
        }
        with MockedOpen(files):
            self.assertEqual(docker_image('myimage'), "cool-images/myimage:1.2.3")

    def test_docker_image_default_registry(self):
        # Without a per-image REGISTRY file, the top-level default is used.
        files = {
            "{}/REGISTRY".format(DOCKER_ROOT): "mozilla",
            "{}/myimage/VERSION".format(DOCKER_ROOT): "1.2.3",
        }
        with MockedOpen(files):
            self.assertEqual(docker_image('myimage'), "mozilla/myimage:1.2.3")
|
25
taskcluster/taskgraph/util.py
Normal file
25
taskcluster/taskgraph/util.py
Normal file
@ -0,0 +1,25 @@
|
||||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
# Root of the gecko checkout (three levels up from this file).
GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
# In-tree docker image definitions live under here.
DOCKER_ROOT = os.path.join(GECKO, 'testing', 'docker')


def docker_image(name):
    ''' Determine the docker image name, including repository and tag, from an
    in-tree docker file'''
    def _read(*parts):
        # Read and strip a small metadata file under DOCKER_ROOT.
        with open(os.path.join(DOCKER_ROOT, *parts)) as f:
            return f.read().strip()

    # Prefer the image's own REGISTRY file; fall back to the shared default.
    try:
        registry = _read(name, 'REGISTRY')
    except IOError:
        registry = _read('REGISTRY')

    version = _read(name, 'VERSION')

    return '{}/{}:{}'.format(registry, name, version)
|
@ -14,10 +14,5 @@
|
||||
"{index}.gecko.v2.{project}.revision.{head_rev}.{build_product}-l10n.{build_name}-{build_type}.{locale}",
|
||||
"{index}.gecko.v2.{project}.pushdate.{year}.{month}.{day}.{pushdate}.{build_product}-l10n.{build_name}-{build_type}.{locale}",
|
||||
"{index}.gecko.v2.{project}.latest.{build_product}-l10n.{build_name}-{build_type}.{locale}"
|
||||
],
|
||||
"docker_images": [
|
||||
"{index}.docker.images.v1.{project}.{image_name}.latest",
|
||||
"{index}.docker.images.v1.{project}.{image_name}.pushdate.{year}.{month}-{day}-{pushtime}",
|
||||
"{index}.docker.images.v1.{project}.{image_name}.hash.{context_hash}"
|
||||
]
|
||||
}
|
||||
|
@ -28,18 +28,6 @@ def is_docker_registry_image(registry_path):
|
||||
def mklabel():
|
||||
return slugid()
|
||||
|
||||
def docker_image(name):
    ''' Determine the docker tag/revision from an in tree docker file '''
    repository_path = os.path.join(DOCKER_ROOT, name, 'REGISTRY')
    repository = REGISTRY

    # Use context managers so the metadata file handles are closed promptly
    # instead of leaking until garbage collection.
    with open(os.path.join(DOCKER_ROOT, name, 'VERSION')) as f:
        version = f.read().strip()

    # A per-image REGISTRY file, when present, overrides the default.
    if os.path.isfile(repository_path):
        with open(repository_path) as f:
            repository = f.read().strip()

    return '{}/{}:{}'.format(repository, name, version)
|
||||
|
||||
def task_id_for_image(seen_images, project, name, create=True):
|
||||
if name in seen_images:
|
||||
return seen_images[name]['taskId']
|
||||
@ -133,118 +121,6 @@ def generate_context_hash(image_path):
|
||||
|
||||
return context_hash.hexdigest()
|
||||
|
||||
def create_context_tar(context_dir, destination, image_name):
    ''' Creates a tar file of a particular context directory '''
    # Make sure the destination's parent directory exists first.
    parent = os.path.dirname(destination)
    if not os.path.exists(parent):
        os.makedirs(parent)

    # Archive the whole context, rooted at the image name.
    with tarfile.open(destination, 'w:gz') as tar:
        tar.add(context_dir, arcname=image_name)
|
||||
|
||||
def image_requires_building(details):
    ''' Returns true if an image task should be created for a particular image

    An image needs building when its details carry both the context path and
    the context hash. '''
    # The membership tests already yield booleans, so return the expression
    # directly instead of the if/else-return-True/False anti-pattern.
    return 'path' in details and 'hash' in details
|
||||
|
||||
def create_image_task_parameters(params, name, details):
    ''' Return a copy of params extended with the image-specific values. '''
    image_specific = {
        'context_hash': details['hash'],
        'context_path': details['path'],
        'artifact_path': 'public/image.tar',
        'image_slugid': details['taskId'],
        'image_name': name,
    }
    # Copy first so the caller's dict is never mutated.
    image_parameters = dict(params)
    image_parameters.update(image_specific)

    return image_parameters
|
||||
|
||||
def get_image_details(seen_images, task_id):
    '''
    Based on a collection of image details, return the details
    for an image matching the requested task_id.

    Image details can include a path and hash indicating that the image requires
    building.
    '''
    # Lazily scan the seen images and stop at the first taskId match;
    # None signals that no image matches.
    matches = ([name, details]
               for name, details in seen_images.items()
               if details['taskId'] == task_id)
    return next(matches, None)
|
||||
|
||||
def get_json_routes():
    ''' Returns routes that should be included in the image task. '''
    # routes.json lives at the taskcluster root; only its docker_images
    # section applies to image tasks.
    routes_file = os.path.join(TASKCLUSTER_ROOT, 'routes.json')
    with open(routes_file) as f:
        return json.load(f)['docker_images']
|
||||
|
||||
def normalize_image_details(graph, task, seen_images, params, decision_task_id):
    '''
    This takes a task-image payload and creates an image task to build that
    image.

    task-image payload is then converted to use a specific task ID of that
    built image. All tasks within the graph requiring this same image will have their
    image details normalized and require the same image build task.
    '''
    image = task['task']['payload']['image']
    # Plain-string images and explicit docker-image payloads need no build task.
    if isinstance(image, str) or image.get('type', 'docker-image') == 'docker-image':
        return

    if 'requires' not in task:
        task['requires'] = []

    name, details = get_image_details(seen_images, image['taskId'])

    # If the image task already exists (details['required']) or the image does
    # not need building, just record the dependency (if any) and stop.
    if details.get('required', False) is True or image_requires_building(details) is False:
        if 'required' in details:
            task['requires'].append(details['taskId'])
        return

    image_parameters = create_image_task_parameters(params, name, details)

    # In a decision task, materialize the docker context tarball where it will
    # be uploaded as a decision-task artifact, and point the image task at it.
    if decision_task_id:
        image_artifact_path = "public/decision_task/image_contexts/{}/context.tar.gz".format(name)
        destination = "/home/worker/artifacts/decision_task/image_contexts/{}/context.tar.gz".format(name)
        image_parameters['context_url'] = ARTIFACT_URL.format(decision_task_id, image_artifact_path)

        create_context_tar(image_parameters['context_path'], destination, name)

    templates = Templates(TASKCLUSTER_ROOT)
    image_task = templates.load(IMAGE_BUILD_TASK, image_parameters)
    # Treeherder/route decoration only applies when a revision_hash is known.
    # NOTE(review): the original layout is ambiguous as to whether
    # decorate_task_json_routes belongs inside this branch -- confirm against
    # upstream history before relying on this grouping.
    if params['revision_hash']:
        treeherder_transform.add_treeherder_revision_info(
            image_task['task'],
            params['head_rev'],
            params['revision_hash']
        )
        routes_transform.decorate_task_treeherder_routes(
            image_task['task'],
            "{}.{}".format(params['project'], params['revision_hash'])
        )
        routes_transform.decorate_task_json_routes(image_task['task'],
                                                   get_json_routes(),
                                                   image_parameters)

    image_task['attributes'] = {
        'kind': 'legacy',
    }

    # Add the build task to the graph and make the requesting task depend on it.
    graph['tasks'].append(image_task);
    task['requires'].append(details['taskId'])

    define_task = DEFINE_TASK.format(
        image_task['task']['workerType']
    )

    # Accumulate every scope the new image task needs onto the graph.
    graph['scopes'].add(define_task)
    graph['scopes'] |= set(image_task['task'].get('scopes', []))
    route_scopes = map(lambda route: 'queue:route:' + route, image_task['task'].get('routes', []))
    graph['scopes'] |= set(route_scopes)

    # Mark the image as handled so later tasks reuse this build task.
    details['required'] = True
|
||||
|
||||
def docker_load_from_url(url):
|
||||
"""Get a docker image from a `docker save` tarball at the given URL,
|
||||
loading it into the running daemon and returning the image name."""
|
||||
|
@ -6,6 +6,7 @@ $inherits:
|
||||
variables:
|
||||
build_name: 'android-api-15-gradle-dependencies'
|
||||
build_type: 'opt'
|
||||
docker-image: android-gradle-build
|
||||
task:
|
||||
metadata:
|
||||
name: '[TC] Android armv7 API 15+ gradle dependencies'
|
||||
@ -50,7 +51,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}android-gradle-build{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
command:
|
||||
- /bin/bash
|
||||
|
@ -2,12 +2,14 @@ $inherits:
|
||||
from: 'tasks/build.yml'
|
||||
variables:
|
||||
build_product: 'b2g'
|
||||
docker-image: builder
|
||||
task:
|
||||
payload:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}builder{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
extra:
|
||||
locations:
|
||||
test_packages: 'public/build/target.test_packages.json'
|
||||
|
@ -2,6 +2,7 @@ $inherits:
|
||||
from: 'tasks/build.yml'
|
||||
variables:
|
||||
build_product: 'firefox'
|
||||
docker-image: desktop-build
|
||||
task:
|
||||
scopes:
|
||||
- "secrets:get:project/releng/gecko/build/level-{{level}}/*"
|
||||
@ -13,6 +14,7 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}desktop-build{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
features:
|
||||
taskclusterProxy: true
|
||||
|
@ -2,10 +2,12 @@ $inherits:
|
||||
from: 'tasks/build.yml'
|
||||
variables:
|
||||
build_product: 'mobile'
|
||||
docker-image: desktop-build
|
||||
task:
|
||||
payload:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}desktop-build{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
|
@ -4,7 +4,7 @@
|
||||
# checking out the source tree.
|
||||
---
|
||||
taskId: {{build_slugid}}
|
||||
|
||||
docker-image: desktop-build
|
||||
task:
|
||||
created: '{{now}}'
|
||||
deadline: '{{#from_now}}24 hours{{/from_now}}'
|
||||
@ -28,7 +28,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}desktop-build{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
cache:
|
||||
# The taskcluster-vcs tooling stores the large clone caches in this
|
||||
# directory and will reuse them for new requests this saves about 20s~
|
||||
|
@ -1,7 +1,7 @@
|
||||
# This tasks takes a mulet build, pull gaia and craft a xpi file for FxOS simulator addon
|
||||
---
|
||||
taskId: {{taskId}}
|
||||
|
||||
taskId: {{build_slugid}}
|
||||
docker-image: builder
|
||||
task:
|
||||
created: '{{now}}'
|
||||
deadline: '{{#from_now}}24 hours{{/from_now}}'
|
||||
@ -27,7 +27,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}builder{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
maxRunTime: 600
|
||||
|
||||
|
@ -1,4 +1,5 @@
|
||||
# This task is the base for most tests in gecko.
|
||||
docker-image: tester
|
||||
task:
|
||||
created: '{{now}}'
|
||||
deadline: '{{#from_now}}24 hours{{/from_now}}'
|
||||
@ -15,7 +16,12 @@ task:
|
||||
- 'docker-worker:feature:allowPtrace'
|
||||
|
||||
payload:
|
||||
image: '{{#docker_image}}tester{{/docker_image}}'
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
maxRunTime: 3600
|
||||
env:
|
||||
MOZILLA_BUILD_URL: {"task-reference": "https://queue.taskcluster.net/v1/task/<{{build_slugid}}>/artifacts/{{build_location}}"}
|
||||
|
@ -6,6 +6,7 @@ $inherits:
|
||||
build_name: 'eslint-gecko'
|
||||
build_type: 'opt'
|
||||
|
||||
docker-image: lint
|
||||
task:
|
||||
metadata:
|
||||
name: '[TC] - ESLint'
|
||||
@ -15,7 +16,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}lint{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
command:
|
||||
- bash
|
||||
|
@ -1,6 +1,7 @@
|
||||
---
|
||||
$inherits:
|
||||
from: 'tasks/test.yml'
|
||||
docker-image: desktop-test
|
||||
task:
|
||||
workerType: desktop-test
|
||||
scopes:
|
||||
@ -9,7 +10,9 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}desktop-test{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
env:
|
||||
NEED_WINDOW_MANAGER: true
|
||||
NEED_PULSEAUDIO: true
|
||||
|
@ -6,6 +6,7 @@ $inherits:
|
||||
build_name: 'mozharness-tox'
|
||||
build_type: 'opt'
|
||||
|
||||
docker-image: desktop-test
|
||||
task:
|
||||
metadata:
|
||||
name: '[TC] - Mozharness Tox'
|
||||
@ -18,7 +19,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}desktop-test{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
|
||||
cache:
|
||||
level-{{level}}-{{project}}-dotcache: '/home/worker/.cache'
|
||||
|
@ -6,6 +6,7 @@ $inherits:
|
||||
build_name: 'flake8-gecko'
|
||||
build_type: 'opt'
|
||||
|
||||
docker-image: lint
|
||||
task:
|
||||
metadata:
|
||||
name: '[TC] - Flake8'
|
||||
@ -14,7 +15,8 @@ task:
|
||||
image:
|
||||
type: 'task-image'
|
||||
path: 'public/image.tar'
|
||||
taskId: '{{#task_id_for_image}}lint{{/task_id_for_image}}'
|
||||
taskId:
|
||||
task-reference: "<docker-image>"
|
||||
command:
|
||||
- bash
|
||||
- -cx
|
||||
|
Loading…
x
Reference in New Issue
Block a user