gecko-dev/taskcluster/taskgraph/transforms/make_task.py
Dustin J. Mitchell 5e8cbd7fb2 Bug 1290523: support generic-worker task descriptions; r=pmoore
MozReview-Commit-ID: CHIGSrB1MIu

--HG--
extra : rebase_source : 41e56a996867dde93fd8f5f67411f81200b507b8
2016-07-29 17:50:09 +00:00

362 lines
12 KiB
Python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
These transformations take a task description and turn it into a TaskCluster
task definition (along with attributes, label, etc.). The input to these
transformations is generic to any kind of task, but abstracts away some of the
complexities of worker implementations, scopes, and treeherder annotations.
"""
from taskgraph.util.treeherder import split_symbol
from taskgraph.transforms.base import (
validate_schema,
TransformSequence
)
from voluptuous import Schema, Any, Required, Optional, Extra
# shortcut for a string where task references are allowed: either a plain
# string, or a {'task-reference': '...'} dict whose value is substituted
# with concrete taskIds when the task graph is materialized
taskref_or_string = Any(
    basestring,
    {Required('task-reference'): basestring})
# A task description is a general description of a TaskCluster task.
# NOTE: keys without a Required(...) marker are optional in voluptuous, and
# only Required keys with a default= are filled in during validation.
task_description_schema = Schema({
    # the label for this task
    'label': basestring,

    # description of the task (for metadata)
    'description': basestring,

    # attributes for this task
    'attributes': {basestring: object},

    # dependencies of this task, keyed by name; these are passed through
    # verbatim and subject to the interpretation of the Task's get_dependencies
    # method.
    'dependencies': {basestring: object},

    # expiration and deadline times, relative to task creation, with units
    # (e.g., "14 days")
    'expires-after': basestring,
    'deadline-after': basestring,

    # custom routes for this task; the default treeherder routes will be added
    # automatically
    'routes': [basestring],

    # custom scopes for this task; any scopes required for the worker will be
    # added automatically
    'scopes': [basestring],

    # custom "task.extra" content
    'extra': {basestring: object},

    # treeherder-related information; see
    # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
    'treeherder': {
        # either a bare symbol, or "grp(sym)".
        'symbol': basestring,

        # the job kind
        'kind': Any('build', 'test', 'other'),

        # tier for this task
        'tier': int,

        # task platform, in the form platform/collection, used to set
        # treeherder.machine.platform and treeherder.collection or
        # treeherder.labels
        'platform': basestring,

        # treeherder environments (defaults to both staging and production)
        Required('environments', default=['production', 'staging']): ['production', 'staging'],
    },

    # the provisioner-id/worker-type for the task
    'worker-type': basestring,

    # information specific to the worker implementation that will run this task
    'worker': Any({
        'implementation': Any('docker-worker', 'docker-engine'),

        # the docker image (in docker's `host/repo/image:tag` format) in which
        # to run the task; if omitted, this will be a reference to the image
        # generated by the 'docker-image' dependency, which must be defined in
        # 'dependencies'
        Optional('docker-image'): basestring,

        # worker features that should be enabled
        Required('relengapi-proxy', default=False): bool,
        Required('allow-ptrace', default=False): bool,
        Required('loopback-video', default=False): bool,
        Required('loopback-audio', default=False): bool,

        # caches to set up for the task
        'caches': [{
            # only one type is supported by any of the workers right now
            'type': 'persistent',

            # name of the cache, allowing re-use by subsequent tasks naming the
            # same cache
            'name': basestring,

            # location in the task image where the cache will be mounted
            'mount-point': basestring,
        }],

        # artifacts to extract from the task image after completion
        'artifacts': [{
            # type of artifact -- simple file, or recursive directory
            'type': Any('file', 'directory'),

            # task image path from which to read artifact
            'path': basestring,

            # name of the produced artifact (root of the names for
            # type=directory)
            'name': basestring,
        }],

        # environment variables
        'env': {basestring: taskref_or_string},

        # the command to run
        'command': [taskref_or_string],

        # the maximum time to run, in seconds
        'max-run-time': int,
    }, {
        'implementation': 'generic-worker',

        # command is a list of commands to run, sequentially
        'command': [basestring],

        # artifacts to extract from the task image after completion; note that artifacts
        # for the generic worker cannot have names
        'artifacts': [{
            # type of artifact -- simple file, or recursive directory
            'type': Any('file', 'directory'),

            # task image path from which to read artifact
            'path': basestring,
        }],

        # environment variables
        'env': {basestring: taskref_or_string},

        # the maximum time to run, in seconds
        'max-run-time': int,
    }, {
        'implementation': 'buildbot-bridge',

        # see https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
        'buildername': basestring,
        'sourcestamp': {
            'branch': basestring,
            Optional('revision'): basestring,
            Optional('repository'): basestring,
            Optional('project'): basestring,
        },
        'properties': {
            'product': basestring,
            Extra: basestring,  # additional properties are allowed
        },
    }),
})
# Map from a treeherder group symbol (the "grp" in "grp(sym)") to the
# human-readable group name displayed in the Treeherder UI.
GROUP_NAMES = {
    'tc': 'Executed by TaskCluster',
    'tc-e10s': 'Executed by TaskCluster with e10s',
    'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
    'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
    'tc-Fxfn-r': 'Firefox functional tests (remote) executed by TaskCluster',
    'tc-Fxfn-r-e10s': 'Firefox functional tests (remote) executed by TaskCluster with e10s',
    'tc-M': 'Mochitests executed by TaskCluster',
    'tc-M-e10s': 'Mochitests executed by TaskCluster with e10s',
    'tc-R': 'Reftests executed by TaskCluster',
    'tc-R-e10s': 'Reftests executed by TaskCluster with e10s',
    'tc-VP': 'VideoPuppeteer tests executed by TaskCluster',
    'tc-W': 'Web platform tests executed by TaskCluster',
    'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
    'tc-X': 'Xpcshell tests executed by TaskCluster',
    'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
}

# error-message template used when a task's group symbol is not in GROUP_NAMES
UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
# define a collection of payload builders, depending on the worker implementation
payload_builders = {}


def payload_builder(name):
    """Decorator factory: register the decorated function in
    ``payload_builders`` under worker-implementation *name*.

    The decorated function is returned unchanged, so stacking with other
    decorators still works.
    """
    def register(func):
        payload_builders[name] = func
        return func
    return register
@payload_builder('docker-worker')
def build_docker_worker_payload(config, task, task_def):
    """Build ``task_def['payload']`` for a docker-worker task.

    Reads ``task['worker']`` and fills in the image, features, device
    capabilities, caches, artifacts, env, command, and maxRunTime.  Also
    appends any worker-required scopes to ``task_def['scopes']``.
    """
    worker = task['worker']

    if 'docker-image' in worker:
        # a literal image name
        image = {
            'type': 'docker-image',
            'name': worker['docker-image'],
        }
    else:
        # no explicit image: reference the artifact of the in-tree
        # docker-image task, which must be listed as a dependency
        assert 'docker-image' in task['dependencies'], 'no docker-worker dependency'
        image = {
            "path": "public/image.tar",
            "taskId": {"task-reference": "<docker-image>"},
            "type": "task-image",
        }

    features = {}

    if worker.get('relengapi-proxy'):
        features['relengAPIProxy'] = True

    if worker.get('allow-ptrace'):
        features['allowPtrace'] = True
        task_def['scopes'].append('docker-worker:feature:allowPtrace')

    capabilities = {}

    for lo in 'audio', 'video':
        if worker.get('loopback-' + lo):
            capitalized = 'loopback' + lo.capitalize()
            devices = capabilities.setdefault('devices', {})
            devices[capitalized] = True
            task_def['scopes'].append('docker-worker:capability:device:' + capitalized)

    # 'caches' and 'artifacts' are plain (optional) keys in the schema, so a
    # valid task may omit them entirely; default to empty lists rather than
    # raising KeyError.
    caches = {}
    for cache in worker.get('caches', []):
        caches[cache['name']] = cache['mount-point']
        task_def['scopes'].append('docker-worker:cache:' + cache['name'])

    artifacts = {}
    for artifact in worker.get('artifacts', []):
        artifacts[artifact['name']] = {
            'path': artifact['path'],
            'type': artifact['type'],
            'expires': task_def['expires'],  # always expire with the task
        }

    task_def['payload'] = payload = {
        'command': worker['command'],
        'cache': caches,
        'artifacts': artifacts,
        'image': image,
        # 'env' is optional in the schema as well
        'env': worker.get('env', {}),
        'maxRunTime': worker['max-run-time'],
    }

    if features:
        payload['features'] = features
    if capabilities:
        payload['capabilities'] = capabilities
@payload_builder('generic-worker')
def build_generic_worker_payload(config, task, task_def):
    """Build ``task_def['payload']`` for a generic-worker task.

    Generic-worker artifacts have no names, only a path and a type; each
    artifact expires with the task itself.
    """
    worker = task['worker']

    # 'artifacts' is a plain (optional) key in the schema; tolerate its
    # absence instead of raising KeyError (matches the docker-worker builder).
    artifacts = []
    for artifact in worker.get('artifacts', []):
        artifacts.append({
            'path': artifact['path'],
            'type': artifact['type'],
            'expires': task_def['expires'],  # always expire with the task
        })

    task_def['payload'] = {
        'command': worker['command'],
        'artifacts': artifacts,
        # 'env' is optional in the schema as well
        'env': worker.get('env', {}),
        'maxRunTime': worker['max-run-time'],
    }
transforms = TransformSequence()


@transforms.add
def validate(config, tasks):
    """Validate each incoming task description against
    ``task_description_schema``, yielding the (possibly default-filled)
    result of validation."""
    for task in tasks:
        label = task.get('label', '?no-label?')
        yield validate_schema(
            task_description_schema, task,
            "In task {!r}:".format(label))
@transforms.add
def build_task(config, tasks):
    """Turn each validated task description into a dict containing the label,
    the full TaskCluster task definition, and the task's dependencies and
    attributes, delegating payload construction to the registered
    payload builder for the worker implementation."""
    for task in tasks:
        # 'worker-type' is of the form "<provisioner-id>/<worker-type>"
        provisioner_id, worker_type = task['worker-type'].split('/', 1)
        routes = task['routes']
        scopes = task['scopes']

        # set up extra
        extra = task['extra']
        extra['treeherderEnv'] = task['treeherder']['environments']

        task_th = task['treeherder']
        treeherder = extra.setdefault('treeherder', {})

        # 'platform' is "<machine-platform>/<collection>"
        machine_platform, collection = task_th['platform'].split('/', 1)
        treeherder['machine'] = {'platform': machine_platform}
        treeherder['collection'] = {collection: True}

        # split_symbol returns ('?', sym) for a bare symbol with no group
        groupSymbol, symbol = split_symbol(task_th['symbol'])
        if groupSymbol != '?':
            treeherder['groupSymbol'] = groupSymbol
            if groupSymbol not in GROUP_NAMES:
                raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
            treeherder['groupName'] = GROUP_NAMES[groupSymbol]
        treeherder['symbol'] = symbol
        treeherder['jobKind'] = task_th['kind']
        treeherder['tier'] = task_th['tier']

        # add the standard treeherder routes (production and staging) in
        # addition to any custom routes from the task description
        routes.extend([
            '{}.v2.{}.{}.{}'.format(root,
                                    config.params['project'],
                                    config.params['head_rev'],
                                    config.params['pushlog_id'])
            for root in 'tc-treeherder', 'tc-treeherder-stage'
        ])

        task_def = {
            'provisionerId': provisioner_id,
            'workerType': worker_type,
            'routes': routes,
            # timestamps are relative to task creation time
            'created': {'relative-datestamp': '0 seconds'},
            'deadline': {'relative-datestamp': task['deadline-after']},
            'expires': {'relative-datestamp': task['expires-after']},
            'scopes': scopes,
            'metadata': {
                'description': task['description'],
                'name': task['label'],
                'owner': config.params['owner'],
                'source': '{}/file/{}/{}'.format(
                    config.params['head_repository'],
                    config.params['head_rev'],
                    config.path),
            },
            'extra': extra,
            'tags': {'createdForUser': config.params['owner']},
        }

        # add the payload and adjust anything else as required (e.g., scopes)
        payload_builders[task['worker']['implementation']](config, task, task_def)

        yield {
            'label': task['label'],
            'task': task_def,
            'dependencies': task['dependencies'],
            'attributes': task['attributes'],
        }