summary refs log tree commit diff stats
path: root/taskcluster/taskgraph/transforms
diff options
context:
space:
mode:
authorMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
committerMatt A. Tobin <mattatobin@localhost.localdomain>2018-02-02 04:16:08 -0500
commit5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree10027f336435511475e392454359edea8e25895d /taskcluster/taskgraph/transforms
parent49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
downloadUXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.gz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.lz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.tar.xz
UXP-5f8de423f190bbb79a62f804151bc24824fa32d8.zip
Add m-esr52 at 52.6.0
Diffstat (limited to 'taskcluster/taskgraph/transforms')
-rw-r--r--taskcluster/taskgraph/transforms/__init__.py0
-rw-r--r--taskcluster/taskgraph/transforms/android_stuff.py46
-rw-r--r--taskcluster/taskgraph/transforms/base.py126
-rw-r--r--taskcluster/taskgraph/transforms/build.py31
-rw-r--r--taskcluster/taskgraph/transforms/build_attrs.py33
-rw-r--r--taskcluster/taskgraph/transforms/gecko_v2_whitelist.py77
-rw-r--r--taskcluster/taskgraph/transforms/job/__init__.py164
-rw-r--r--taskcluster/taskgraph/transforms/job/common.py108
-rw-r--r--taskcluster/taskgraph/transforms/job/hazard.py91
-rw-r--r--taskcluster/taskgraph/transforms/job/mach.py30
-rw-r--r--taskcluster/taskgraph/transforms/job/mozharness.py226
-rw-r--r--taskcluster/taskgraph/transforms/job/run_task.py59
-rw-r--r--taskcluster/taskgraph/transforms/job/spidermonkey.py86
-rw-r--r--taskcluster/taskgraph/transforms/job/toolchain.py115
-rw-r--r--taskcluster/taskgraph/transforms/l10n.py44
-rw-r--r--taskcluster/taskgraph/transforms/marionette_harness.py37
-rw-r--r--taskcluster/taskgraph/transforms/task.py648
-rw-r--r--taskcluster/taskgraph/transforms/tests/__init__.py0
-rw-r--r--taskcluster/taskgraph/transforms/tests/all_kinds.py137
-rw-r--r--taskcluster/taskgraph/transforms/tests/android_test.py42
-rw-r--r--taskcluster/taskgraph/transforms/tests/desktop_test.py118
-rw-r--r--taskcluster/taskgraph/transforms/tests/make_task_description.py445
-rw-r--r--taskcluster/taskgraph/transforms/tests/test_description.py235
-rw-r--r--taskcluster/taskgraph/transforms/upload_symbols.py36
24 files changed, 2934 insertions, 0 deletions
diff --git a/taskcluster/taskgraph/transforms/__init__.py b/taskcluster/taskgraph/transforms/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/__init__.py
diff --git a/taskcluster/taskgraph/transforms/android_stuff.py b/taskcluster/taskgraph/transforms/android_stuff.py
new file mode 100644
index 000000000..cb1e0fa5b
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/android_stuff.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Set dynamic task description properties of the android stuff. Temporary!
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def setup_task(config, tasks):
    """Fill in the dynamic parts of each android task description: the
    label, environment variables derived from the parameters, and the
    tc-vcs / workspace caches."""
    for task in tasks:
        # the task name doubles as its label
        task['label'] = task['name']

        worker = task['worker']
        env = worker.setdefault('env', {})
        # NOTE(review): GECKO_HEAD_REF is set to the revision, not a ref
        # name, matching the original behavior -- confirm intended.
        env['GECKO_BASE_REPOSITORY'] = config.params['base_repository']
        env['GECKO_HEAD_REF'] = config.params['head_rev']
        env['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
        env['GECKO_HEAD_REV'] = config.params['head_rev']
        env['MOZ_BUILD_DATE'] = config.params['moz_build_date']
        env['MOZ_SCM_LEVEL'] = config.params['level']
        env['MH_BRANCH'] = config.params['project']

        caches = worker.setdefault('caches', [])
        caches.append({
            'type': 'persistent',
            'name': 'level-{}-{}-tc-vcs'.format(
                config.params['level'], config.params['project']),
            'mount-point': "/home/worker/.tc-vcs",
        })

        # workspace caches are only used above level 1
        if int(config.params['level']) > 1:
            caches.append({
                'type': 'persistent',
                'name': 'level-{}-{}-build-{}-workspace'.format(
                    config.params['level'], config.params['project'], task['name']),
                'mount-point': "/home/worker/workspace",
            })

        # 'name' is not a valid task-description property
        del task['name']
        yield task
diff --git a/taskcluster/taskgraph/transforms/base.py b/taskcluster/taskgraph/transforms/base.py
new file mode 100644
index 000000000..aab139252
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/base.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+import pprint
+import voluptuous
+
+
class TransformConfig(object):
    """Configuration common to every transform invocation for one kind.

    An instance of this class is the `config` argument passed to each
    transform; kind implementations may attach additional attributes."""

    def __init__(self, kind, path, config, params):
        # name of the kind currently being processed
        self.kind = kind
        # directory containing the kind configuration
        self.path = path
        # parsed contents of kind.yml
        self.config = config
        # parameters for this task-graph generation run
        self.params = params
+
+
class TransformSequence(object):
    """
    An ordered collection of transforms, itself usable as a transform.

    Each transform is a callable taking (config, items) and returning a
    generator of transformed items; calling the sequence pipes the items
    through every registered transform in order.  The decorator form,
    @transforms.add, appends the decorated function to the sequence.
    """

    def __init__(self, transforms=None):
        self.transforms = transforms or []

    def __call__(self, config, items):
        for transform in self.transforms:
            items = transform(config, items)
            # every transform must return an iterable (usually a generator)
            if items is None:
                raise Exception("Transform {} is not a generator".format(transform))
        return items

    def __repr__(self):
        lines = ['TransformSequence([']
        lines.extend(repr(t) for t in self.transforms)
        lines.append('])')
        return '\n'.join(lines)

    def add(self, func):
        """Decorator: append `func` to the sequence and return it unchanged."""
        self.transforms.append(func)
        return func
+
+
def validate_schema(schema, obj, msg_prefix):
    """
    Check `obj` against the voluptuous `schema`, returning the validated
    (possibly coerced) object.  On failure, raise an exception whose message
    begins with `msg_prefix`, lists each schema error, and ends with a
    pretty-printed dump of the offending object.
    """
    try:
        return schema(obj)
    except voluptuous.MultipleInvalid as exc:
        lines = [msg_prefix]
        lines.extend(str(error) for error in exc.errors)
        raise Exception('\n'.join(lines) + '\n' + pprint.pformat(obj))
+
+
def get_keyed_by(item, field, item_name, subfield=None):
    """
    For values which can either accept a literal value, or be keyed by some
    other attribute of the item, perform that lookup. For example, this supports

        chunks:
            by-test-platform:
                macosx-10.11/debug: 13
                win.*: 6
                default: 12

    Lookup order for the keyed-by attribute's value: exact match first, then
    each key treated as a regular expression (via re.match), then 'default'.

    The `item_name` parameter is used to generate useful error messages.
    The `subfield` parameter, if specified, allows access to a second level
    of the item dictionary: item[field][subfield]. For example, this supports

        mozharness:
            config:
                by-test-platform:
                    default: ...
    """
    value = item[field]
    if not isinstance(value, dict):
        return value
    if subfield:
        value = item[field][subfield]
        if not isinstance(value, dict):
            return value

    assert len(value) == 1, "Invalid attribute {} in {}".format(field, item_name)
    # list(...) works on both Python 2 and 3; bare `value.keys()[0]` is
    # Python-2-only (dict.keys() is a view on Python 3)
    keyed_by = list(value.keys())[0]
    values = value[keyed_by]
    if not keyed_by.startswith('by-'):
        raise Exception(
            "Invalid attribute {} keyed-by value {} in {}".format(
                field, keyed_by, item_name))

    keyed_by = keyed_by[3:]  # extract just the keyed-by field name
    key = item[keyed_by]
    # exact match first
    if key in values:
        return values[key]
    # then treat each alternative as a regular expression
    # NOTE(review): the 'default' key is also tried as a regex here,
    # preserving the original behavior -- confirm that is intended
    for alternative in values.keys():
        if re.match(alternative, key):
            return values[alternative]
    # finally fall back to an explicit default
    if 'default' in values:
        return values['default']
    # (the original also had a second exact/default lookup loop here; both
    # of its cases were already handled above, so it could never return and
    # its for-else always raised -- dead code, removed)
    raise Exception(
        "Neither {} {} nor 'default' found while determining item {} in {}".format(
            keyed_by, key, field, item_name))
diff --git a/taskcluster/taskgraph/transforms/build.py b/taskcluster/taskgraph/transforms/build.py
new file mode 100644
index 000000000..3875cbbb1
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/build.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the build
+kind.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        treeherder = job['treeherder']
        treeherder.setdefault('kind', 'build')
        treeherder.setdefault('tier', 1)

        worker = job['worker']
        if worker['implementation'] in ('docker-worker', 'docker-engine'):
            # default build image, and record the image as a chain-of-trust
            # input so its provenance can be verified
            worker.setdefault('docker-image', {'in-tree': 'desktop-build'})
            worker['chain-of-trust'] = True
            extra = job.setdefault('extra', {})
            chain_of_trust = extra.setdefault('chainOfTrust', {})
            inputs = chain_of_trust.setdefault('inputs', {})
            inputs['docker-image'] = {
                "task-reference": "<docker-image>"
            }
        yield job
diff --git a/taskcluster/taskgraph/transforms/build_attrs.py b/taskcluster/taskgraph/transforms/build_attrs.py
new file mode 100644
index 000000000..56c007614
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/build_attrs.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
def set_build_attributes(config, jobs):
    """
    Derive the build_platform and build_type attributes from the job name.
    Although not all jobs using this transform are actual "builds", the try
    option syntax treats them as such, and this arranges the attributes
    appropriately for that purpose.
    """
    for job in jobs:
        platform, build_type = job['name'].split('/')

        # a pgo build is recorded as its own platform, with type 'opt'
        if build_type == 'pgo':
            platform += '-pgo'
            build_type = 'opt'

        job.setdefault('attributes', {}).update(
            build_platform=platform,
            build_type=build_type,
        )

        yield job
diff --git a/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
new file mode 100644
index 000000000..3817faa50
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This file contains a whitelist of gecko.v2 index route job names. The intent
+of this whitelist is to raise an alarm when new jobs are added. If those jobs
+already run in Buildbot, then it's important that the generated index routes
+match (and that only one of Buildbot and TaskCluster be tier-1 at any time).
+If the jobs are new and never ran in Buildbot, then their job name can be added
+here without any further fuss.
+
+Once all jobs have been ported from Buildbot, this file can be removed.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
# please keep me in lexical order
# (re-sorted: the previous listing violated its own ordering rule, e.g.
# 'android-api-15-opt' before 'android-api-15-nightly-opt' and the
# 'linux64-*' entries before 'linux-debug'; set contents are unchanged)
JOB_NAME_WHITELIST = set([
    'android-api-15-debug',
    'android-api-15-gradle-dependencies-opt',
    'android-api-15-gradle-opt',
    'android-api-15-nightly-opt',
    'android-api-15-opt',
    'android-api-15-partner-sample1-opt',
    'android-l10n-opt',
    'android-x86-opt',
    'aries-debug',
    'aries-eng-opt',
    'browser-haz-debug',
    'linux-debug',
    'linux-opt',
    'linux-pgo',
    'linux32-l10n-opt',
    'linux64-artifact-opt',
    'linux64-asan-debug',
    'linux64-asan-opt',
    'linux64-ccov-opt',
    'linux64-debug',
    'linux64-jsdcov-opt',
    'linux64-l10n-opt',
    'linux64-opt',
    'linux64-pgo',
    'linux64-st-an-opt',
    'linux64-valgrind-opt',
    'macosx64-debug',
    'macosx64-opt',
    'macosx64-st-an-opt',
    'nexus-5-l-eng-debug',
    'nexus-5-l-eng-opt',
    'osx-10-10',
    'shell-haz-debug',
    'sm-arm-sim-debug',
    'sm-arm64-sim-debug',
    'sm-asan-opt',
    'sm-compacting-debug',
    'sm-mozjs-sys-debug',
    'sm-msan-opt',
    'sm-nonunified-debug',
    'sm-package-opt',
    'sm-plain-opt',
    'sm-plaindebug-debug',
    'sm-rootanalysis-debug',
    'sm-tsan-opt',
    'win32-debug',
    'win32-opt',
    'win32-pgo',
    'win64-debug',
    'win64-opt',
    'win64-pgo',
])

# Error template formatted with the offending job name.
# NOTE(review): '__file__' appears literally in the message rather than being
# interpolated -- confirm whether that is intended.
JOB_NAME_WHITELIST_ERROR = """\
The gecko-v2 job name {} is not in the whitelist in __file__.
If this job runs on Buildbot, please ensure that the job names match between
Buildbot and TaskCluster, then add the job name to the whitelist. If this is a
new job, there is nothing to check -- just add the job to the whitelist.
"""
diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py
new file mode 100644
index 000000000..a0860c032
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -0,0 +1,164 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Jobs descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/job`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+import os
+
+from taskgraph.transforms.base import validate_schema, TransformSequence
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import (
+ Optional,
+ Required,
+ Schema,
+ Extra,
+)
+
+logger = logging.getLogger(__name__)
+
# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings.
# (items() instead of the Python-2-only iteritems(): identical result here,
# and keeps the module importable on Python 3)
task_description_schema = {str(k): v for k, v in task_description_schema.schema.items()}
+
# Schema for a build description
# (keys below index into task_description_schema, the stringified copy of
# the task schema built just above)
job_description_schema = Schema({
    # The name of the job and the job's label. At least one must be specified,
    # and the label will be generated from the name if necessary, by prepending
    # the kind.
    Optional('name'): basestring,
    Optional('label'): basestring,

    # the following fields are passed directly through to the task description,
    # possibly modified by the run implementation. See
    # taskcluster/taskgraph/transforms/task.py for the schema details.
    Required('description'): task_description_schema['description'],
    Optional('attributes'): task_description_schema['attributes'],
    Optional('dependencies'): task_description_schema['dependencies'],
    Optional('expires-after'): task_description_schema['expires-after'],
    Optional('routes'): task_description_schema['routes'],
    Optional('scopes'): task_description_schema['scopes'],
    Optional('extra'): task_description_schema['extra'],
    Optional('treeherder'): task_description_schema['treeherder'],
    Optional('index'): task_description_schema['index'],
    Optional('run-on-projects'): task_description_schema['run-on-projects'],
    Optional('coalesce-name'): task_description_schema['coalesce-name'],
    Optional('worker-type'): task_description_schema['worker-type'],
    Required('worker'): task_description_schema['worker'],
    Optional('when'): task_description_schema['when'],

    # A description of how to run this job.
    'run': {
        # The key to a job implementation in a peer module to this one
        'using': basestring,

        # Any remaining content is verified against that job implementation's
        # own schema (see run_job_using below).
        Extra: object,
    },
})
+
+transforms = TransformSequence()
+
+
+@transforms.add
def validate(config, jobs):
    """Validate each job against the job description schema, yielding the
    validated (possibly coerced) job."""
    for job in jobs:
        # jobs may arrive with either a 'name' or a pre-computed 'label'
        # (both are Optional in the schema); indexing job['name'] directly
        # raised KeyError for label-only jobs
        yield validate_schema(job_description_schema, job,
                              "In job {!r}:".format(job.get('name', job.get('label'))))
+
+
+@transforms.add
def make_task_description(config, jobs):
    """Given a build description, create a task description"""
    # import plugin modules first, before iterating over jobs
    import_all()
    for job in jobs:
        if 'label' not in job:
            if 'name' not in job:
                raise Exception("job has neither a name nor a label")
            job['label'] = '{}-{}'.format(config.kind, job['name'])
        # 'name' is not part of the task description; drop it if present.
        # (job.get avoids the KeyError the old job['name'] raised for jobs
        # that arrived with only a label)
        if job.get('name'):
            del job['name']

        taskdesc = copy.deepcopy(job)

        # fill in some empty defaults to make run implementations easier
        taskdesc.setdefault('attributes', {})
        taskdesc.setdefault('dependencies', {})
        taskdesc.setdefault('routes', [])
        taskdesc.setdefault('scopes', [])
        taskdesc.setdefault('extra', {})

        # give the function for job.run.using on this worker implementation a
        # chance to set up the task description.
        configure_taskdesc_for_run(config, job, taskdesc)
        del taskdesc['run']

        # yield only the task description, discarding the job description
        yield taskdesc
+
# A registry of all functions decorated with run_job_using,
# mapping run.using value -> worker implementation -> (function, schema)
registry = {}


def run_job_using(worker_implementation, run_using, schema=None):
    """Register the decorated function as able to set up a task description for
    jobs with the given worker implementation and `run.using` property.  If
    `schema` is given, the job's run field will be verified to match it.

    The decorated function should have the signature `using_foo(config, job,
    taskdesc)` and should modify the task description in-place.  The skeleton
    of the task description is already set up, but without a payload."""
    def wrap(func):
        for_run_using = registry.setdefault(run_using, {})
        if worker_implementation in for_run_using:
            # report the previously-registered function; the old code
            # indexed for_run_using[run_using], but this dict is keyed by
            # worker implementation, so it raised KeyError instead of the
            # intended message
            raise Exception("run_job_using({!r}, {!r}) already exists: {!r}".format(
                run_using, worker_implementation,
                for_run_using[worker_implementation]))
        for_run_using[worker_implementation] = (func, schema)
        return func
    return wrap
+
+
def configure_taskdesc_for_run(config, job, taskdesc):
    """
    Run the appropriate function for this job against the given task
    description.

    This will raise an appropriate error if no function exists, or if the job's
    run is not valid according to the schema.
    """
    run_using = job['run']['using']
    worker_implementation = job['worker']['implementation']

    if run_using not in registry:
        raise Exception("no functions for run.using {!r}".format(run_using))
    if worker_implementation not in registry[run_using]:
        raise Exception("no functions for run.using {!r} on {!r}".format(
            run_using, worker_implementation))

    func, schema = registry[run_using][worker_implementation]
    if schema:
        # validate and normalize the run section before handing it over
        msg_prefix = "In job.run using {!r} for job {!r}:".format(
            job['run']['using'], job['label'])
        job['run'] = validate_schema(schema, job['run'], msg_prefix)

    func(config, job, taskdesc)
+
+
def import_all():
    """Import all modules that are siblings of this one, triggering the
    run_job_using decorators in the process.

    The helper module common.py and the package __init__ are skipped, as
    they register no run-using handlers."""
    for f in os.listdir(os.path.dirname(__file__)):
        # fixed typo: the exclusion was spelled 'commmon.py' (three m's),
        # so common.py was needlessly imported along with the real plugins
        if f.endswith('.py') and f not in ('common.py', '__init__.py'):
            __import__('taskgraph.transforms.job.' + f[:-3])
diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
new file mode 100644
index 000000000..59a51d75a
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
# scope template for secrets access: kind, level, secret name
SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'


def docker_worker_add_workspace_cache(config, job, taskdesc):
    """Add the workspace cache based on the build platform/type and level,
    except on try where workspace caches are not used."""
    if config.params['project'] == 'try':
        return

    attributes = taskdesc['attributes']
    cache_name = 'level-{}-{}-build-{}-{}-workspace'.format(
        config.params['level'], config.params['project'],
        attributes['build_platform'], attributes['build_type'])
    taskdesc['worker'].setdefault('caches', []).append({
        'type': 'persistent',
        'name': cache_name,
        'mount-point': "/home/worker/workspace",
    })
+
+
def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
    """Mount the per-level, per-project tc-vcs cache in the worker."""
    cache = {
        'type': 'persistent',
        'name': 'level-{}-{}-tc-vcs'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/home/worker/.tc-vcs",
    }
    taskdesc['worker'].setdefault('caches', []).append(cache)
+
+
def docker_worker_add_public_artifacts(config, job, taskdesc):
    """Declare /home/worker/artifacts/ as the public/build artifact directory."""
    artifacts = taskdesc['worker'].setdefault('artifacts', [])
    artifacts.append({
        'name': 'public/build',
        'path': '/home/worker/artifacts/',
        'type': 'directory',
    })
+
+
def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
    """Add the GECKO_BASE_* and GECKO_HEAD_* env vars to the worker."""
    params = config.params
    env = taskdesc['worker'].setdefault('env', {})
    # NOTE(review): GECKO_HEAD_REF is set to the revision, not a ref name,
    # matching the original behavior -- confirm intended
    env['GECKO_BASE_REPOSITORY'] = params['base_repository']
    env['GECKO_HEAD_REF'] = params['head_rev']
    env['GECKO_HEAD_REPOSITORY'] = params['head_repository']
    env['GECKO_HEAD_REV'] = params['head_rev']
+
+
def docker_worker_support_vcs_checkout(config, job, taskdesc):
    """Update a job/task with parameters to enable a VCS checkout.

    The configuration is intended for tasks using "run-task" and its
    VCS checkout behavior.
    """
    params = config.params
    worker = taskdesc['worker']

    # History of cache versions:
    #
    # ``level-%s-checkouts`` was initially used and contained a number
    # of backwards incompatible changes, such as moving HG_STORE_PATH
    # from a separate cache to this cache.
    #
    # ``v1`` was introduced to provide a clean break from the unversioned
    # cache.
    worker.setdefault('caches', []).append({
        'type': 'persistent',
        'name': 'level-%s-checkouts-v1' % params['level'],
        'mount-point': '/home/worker/checkouts',
    })

    env = worker.setdefault('env', {})
    env['GECKO_BASE_REPOSITORY'] = params['base_repository']
    env['GECKO_HEAD_REPOSITORY'] = params['head_repository']
    env['GECKO_HEAD_REV'] = params['head_rev']
    env['HG_STORE_PATH'] = '/home/worker/checkouts/hg-store'

    # Give task access to hgfingerprint secret so it can pin the certificate
    # for hg.mozilla.org.
    taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
    worker['taskcluster-proxy'] = True
+
+
def docker_worker_setup_secrets(config, job, taskdesc):
    """Set up access to secrets via taskcluster-proxy.  The value of
    run['secrets'] should be a boolean or a list of secret names that
    can be accessed."""
    secrets = job['run'].get('secrets')
    if not secrets:
        return

    taskdesc['worker']['taskcluster-proxy'] = True
    if secrets is True:
        # `true` grants access to all secrets under the prefix
        secrets = ['*']
    for secret_name in secrets:
        taskdesc['scopes'].append(SECRET_SCOPE.format(
            job['treeherder']['kind'], config.params['level'], secret_name))
diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py
new file mode 100644
index 000000000..c5b500843
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_workspace_cache,
+ docker_worker_setup_secrets,
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
# Schema for the 'run' section of a hazard job (run.using == 'hazard').
# NOTE: basestring is Python-2-only, consistent with the rest of this file.
haz_run_schema = Schema({
    Required('using'): 'hazard',

    # The command to run within the task image (passed through to the worker)
    Required('command'): basestring,

    # The tooltool manifest to use; default in the script is used if omitted
    Optional('tooltool-manifest'): basestring,

    # The mozconfig to use; default in the script is used if omitted
    Optional('mozconfig'): basestring,

    # The set of secret names to which the task has access; these are prefixed
    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
    # this will enable any worker features required and set the task's scopes
    # appropriately. `true` here means ['*'], all secrets. Not supported on
    # Windows
    Required('secrets', default=False): Any(bool, [basestring]),
})
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
def docker_worker_hazard(config, job, taskdesc):
    """Set up a hazard-analysis task: artifacts, caches, secrets, VCS
    checkout, tooltool access, and the wrapper command line."""
    run = job['run']
    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['caches'] = []

    # shared docker-worker setup helpers
    docker_worker_add_public_artifacts(config, job, taskdesc)
    docker_worker_add_workspace_cache(config, job, taskdesc)
    docker_worker_setup_secrets(config, job, taskdesc)
    docker_worker_support_vcs_checkout(config, job, taskdesc)

    env = worker['env']
    env['MOZ_BUILD_DATE'] = config.params['moz_build_date']
    env['MOZ_SCM_LEVEL'] = config.params['level']

    # optional script parameters
    if run.get('tooltool-manifest'):
        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
    if run.get('mozconfig'):
        env['MOZCONFIG'] = run['mozconfig']

    # tooltool downloads
    worker['caches'].append({
        'type': 'persistent',
        'name': 'tooltool-cache',
        'mount-point': '/home/worker/tooltool-cache',
    })
    worker['relengapi-proxy'] = True
    taskdesc['scopes'].append(
        'docker-worker:relengapi-proxy:tooltool.download.public')
    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
    env['TOOLTOOL_REV'] = 'master'

    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
    # the workspace.
    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'

    worker['command'] = [
        '/home/worker/bin/run-task',
        '--chown-recursive', '/home/worker/tooltool-cache',
        '--chown-recursive', '/home/worker/workspace',
        '--vcs-checkout', '/home/worker/checkouts/gecko',
        '--',
        '/bin/bash', '-c', run['command']
    ]
diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py
new file mode 100644
index 000000000..8df202dbc
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.run_task import docker_worker_run_task
+from voluptuous import Schema, Required
+
# Schema for the 'run' section of a mach job (run.using == 'mach').
mach_schema = Schema({
    Required('using'): 'mach',

    # The mach command (omitting `./mach`) to run
    Required('mach'): basestring,
})
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema)
def docker_worker_mach(config, job, taskdesc):
    """Translate a mach job into run-task form and defer to the run-task
    implementation."""
    run = job['run']

    # rewrite the run section: replace 'mach' with an equivalent 'command'
    mach_command = run.pop('mach')
    run['command'] = 'cd /home/worker/checkouts/gecko && ./mach ' + mach_command
    run['checkout'] = True
    docker_worker_run_task(config, job, taskdesc)
diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py
new file mode 100644
index 000000000..fb3cd00dd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,226 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_workspace_cache,
+ docker_worker_add_gecko_vcs_env_vars,
+ docker_worker_setup_secrets,
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
+COALESCE_KEY = 'builds.{project}.{name}'
+
+mozharness_run_schema = Schema({
+ Required('using'): 'mozharness',
+
+ # the mozharness script used to run this task, relative to the testing/
+ # directory and using forward slashes even on Windows
+ Required('script'): basestring,
+
+ # the config files required for the task, relative to
+ # testing/mozharness/configs and using forward slashes even on Windows
+ Required('config'): [basestring],
+
+ # any additional actions to pass to the mozharness command; not supported
+ # on Windows
+ Optional('actions'): [basestring],
+
+ # any additional options (without leading --) to be passed to mozharness;
+ # not supported on Windows
+ Optional('options'): [basestring],
+
+ # --custom-build-variant-cfg value (not supported on Windows)
+ Optional('custom-build-variant-cfg'): basestring,
+
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required('tooltool-downloads', default=False): Any(
+ False,
+ 'public',
+ 'internal',
+ ),
+
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required('secrets', default=False): Any(bool, [basestring]),
+
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required('taskcluster-proxy', default=False): bool,
+
+ # If true, the build scripts will start Xvfb. Not supported on Windows.
+ Required('need-xvfb', default=False): bool,
+
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required('keep-artifacts', default=True): bool,
+
+ # If specified, use the in-tree job script specified.
+ Optional('job-script'): basestring,
+})
+
+
+@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['implementation'] = job['worker']['implementation']
+
+ # running via mozharness assumes desktop-build (which contains build.sh)
+ taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}
+
+ worker['relengapi-proxy'] = False # but maybe enabled for tooltool below
+ worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
+
+ docker_worker_add_public_artifacts(config, job, taskdesc)
+ docker_worker_add_workspace_cache(config, job, taskdesc)
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ env = worker.setdefault('env', {})
+ env.update({
+ 'MOZHARNESS_CONFIG': ' '.join(run['config']),
+ 'MOZHARNESS_SCRIPT': run['script'],
+ 'MH_BRANCH': config.params['project'],
+ 'MH_BUILD_POOL': 'taskcluster',
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ if 'actions' in run:
+ env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
+
+ if 'options' in run:
+ env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
+
+ if 'custom-build-variant-cfg' in run:
+ env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']
+
+ if 'job-script' in run:
+ env['JOB_SCRIPT'] = run['job-script']
+
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run['keep-artifacts']:
+ env['DIST_TARGET_UPLOADS'] = ''
+ env['DIST_UPLOADS'] = ''
+
+ # Xvfb
+ if run['need-xvfb']:
+ env['NEED_XVFB'] = 'true'
+
+ # tooltool downloads
+ if run['tooltool-downloads']:
+ worker['relengapi-proxy'] = True
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ taskdesc['scopes'].extend([
+ 'docker-worker:relengapi-proxy:tooltool.download.public',
+ ])
+ if run['tooltool-downloads'] == 'internal':
+ taskdesc['scopes'].append(
+ 'docker-worker:relengapi-proxy:tooltool.download.internal')
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+ env['TOOLTOOL_REV'] = 'master'
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker['retry-exit-status'] = 4
+
+ docker_worker_setup_secrets(config, job, taskdesc)
+
+ command = [
+ '/home/worker/bin/run-task',
+ # Various caches/volumes are default owned by root:root.
+ '--chown-recursive', '/home/worker/workspace',
+ '--chown-recursive', '/home/worker/tooltool-cache',
+ '--vcs-checkout', '/home/worker/workspace/build/src',
+ '--tools-checkout', '/home/worker/workspace/build/tools',
+ '--',
+ ]
+ command.append("/home/worker/workspace/build/src/{}".format(
+ run.get('job-script',
+ "taskcluster/scripts/builder/build-linux.sh"
+ )))
+
+ worker['command'] = command
+
+
+# We use the generic worker to run tasks on Windows
+@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_windows(config, job, taskdesc):
+ run = job['run']
+
+ # fail if invalid run options are included
+ invalid = []
+ for prop in ['actions', 'custom-build-variant-cfg',
+ 'tooltool-downloads', 'secrets', 'taskcluster-proxy',
+ 'need-xvfb']:
+ if prop in run and run[prop]:
+ invalid.append(prop)
+ if not run.get('keep-artifacts', True):
+ invalid.append('keep-artifacts')
+ if invalid:
+ raise Exception("Jobs run using mozharness on Windows do not support properties " +
+ ', '.join(invalid))
+
+ worker = taskdesc['worker']
+
+ worker['artifacts'] = [{
+ 'path': r'public\build',
+ 'type': 'directory',
+ }]
+
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+ 'TOOLTOOL_REV': 'master',
+ })
+
+ mh_command = [r'c:\mozilla-build\python\python.exe']
+ mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
+ for cfg in run['config']:
+ mh_command.append('--config ' + cfg.replace('/', '\\'))
+ mh_command.append('--branch ' + config.params['project'])
+ mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
+ for option in run.get('options', []):
+ mh_command.append('--' + option)
+
+ hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
+ hg_command.append('robustcheckout')
+ hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+ hg_command.append('--purge')
+ hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+ hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
+ hg_command.append(env['GECKO_HEAD_REPOSITORY'])
+ hg_command.append('.\\build\\src')
+
+ worker['command'] = [
+ ' '.join(hg_command),
+ ' '.join(mh_command)
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
new file mode 100644
index 000000000..296fe43ee
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
+from voluptuous import Schema, Required, Any
+
+run_task_schema = Schema({
+ Required('using'): 'run-task',
+
+ # if true, add a cache at ~worker/.cache, which is where things like pip
+ # tend to hide their caches. This cache is never added for level-1 jobs.
+ Required('cache-dotcache', default=False): bool,
+
+ # if true (the default), perform a checkout in /home/worker/checkouts/gecko
+ Required('checkout', default=True): bool,
+
+ # The command arguments to pass to the `run-task` script, after the
+ # checkout arguments. If a list, it will be passed directly; otherwise
+ # it will be included in a single argument to `bash -cx`.
+ Required('command'): Any([basestring], basestring),
+})
+
+
+@run_job_using("docker-worker", "run-task", schema=run_task_schema)
+def docker_worker_run_task(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker'] = copy.deepcopy(job['worker'])
+
+ if run['checkout']:
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ if run.get('cache-dotcache') and int(config.params['level']) > 1:
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'level-{level}-{project}-dotcache'.format(**config.params),
+ 'mount-point': '/home/worker/.cache',
+ })
+
+ run_command = run['command']
+ if isinstance(run_command, basestring):
+ run_command = ['bash', '-cx', run_command]
+ command = ['/home/worker/bin/run-task']
+ if run['checkout']:
+ command.append('--vcs-checkout=/home/worker/checkouts/gecko')
+ command.append('--')
+ command.extend(run_command)
+ worker['command'] = command
diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 000000000..d78b78504
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
+sm_run_schema = Schema({
+ Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate'),
+
+ # The SPIDERMONKEY_VARIANT
+ Required('spidermonkey-variant'): basestring,
+
+ # The tooltool manifest to use; default from sm-tooltool-config.sh is used
+ # if omitted
+ Optional('tooltool-manifest'): basestring,
+})
+
+
+@run_job_using("docker-worker", "spidermonkey")
+@run_job_using("docker-worker", "spidermonkey-package")
+@run_job_using("docker-worker", "spidermonkey-mozjs-crate")
+def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['artifacts'] = []
+ worker['caches'] = []
+
+ if int(config.params['level']) > 1:
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/workspace",
+ })
+
+ docker_worker_add_public_artifacts(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZHARNESS_DISABLE': 'true',
+ 'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ # tooltool downloads; note that this script downloads using the API
+    # endpoint directly, rather than via relengapi-proxy
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ if run.get('tooltool-manifest'):
+ env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
+
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ script = "build-sm.sh"
+ if run['using'] == 'spidermonkey-package':
+ script = "build-sm-package.sh"
+ elif run['using'] == 'spidermonkey-mozjs-crate':
+ script = "build-sm-mozjs-crate.sh"
+
+ worker['command'] = [
+ '/home/worker/bin/run-task',
+ '--chown-recursive', '/home/worker/workspace',
+ '--chown-recursive', '/home/worker/tooltool-cache',
+ '--vcs-checkout', '/home/worker/workspace/build/src',
+ '--',
+ '/bin/bash',
+ '-c',
+ 'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py
new file mode 100644
index 000000000..d814f7824
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_tc_vcs_cache,
+ docker_worker_add_gecko_vcs_env_vars
+)
+
+toolchain_run_schema = Schema({
+ Required('using'): 'toolchain-script',
+
+ # the script (in taskcluster/scripts/misc) to run
+ Required('script'): basestring,
+})
+
+
+@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
+def docker_worker_toolchain(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['artifacts'] = []
+ worker['caches'] = []
+
+ worker['artifacts'].append({
+ 'name': 'public',
+ 'path': '/home/worker/workspace/artifacts/',
+ 'type': 'directory',
+ })
+
+ docker_worker_add_tc_vcs_cache(config, job, taskdesc)
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLS_DISABLE': 'true',
+ })
+
+ # tooltool downloads; note that this downloads using the API endpoint directly,
+ # rather than via relengapi-proxy
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+ env['TOOLTOOL_REV'] = 'master'
+
+ command = ' && '.join([
+ "cd /home/worker/",
+ "./bin/checkout-sources.sh",
+ "./workspace/build/src/taskcluster/scripts/misc/" + run['script'],
+ ])
+ worker['command'] = ["/bin/bash", "-c", command]
+
+
+@run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
+def windows_toolchain(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+
+ worker['artifacts'] = [{
+ 'path': r'public\build',
+ 'type': 'directory',
+ }]
+
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ # We fetch LLVM SVN into this.
+ svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
+ worker['mounts'] = [{
+ 'cache-name': svn_cache,
+ 'path': r'llvm-sources',
+ }]
+ taskdesc['scopes'].extend([
+ 'generic-worker:cache:' + svn_cache,
+ ])
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+ 'TOOLTOOL_REV': 'master',
+ })
+
+ hg = r'c:\Program Files\Mercurial\hg.exe'
+ hg_command = ['"{}"'.format(hg)]
+ hg_command.append('robustcheckout')
+ hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+ hg_command.append('--purge')
+ hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+ hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
+ hg_command.append('%GECKO_HEAD_REPOSITORY%')
+ hg_command.append('.\\build\\src')
+
+ bash = r'c:\mozilla-build\msys\bin\bash'
+ worker['command'] = [
+ ' '.join(hg_command),
+ # do something intelligent.
+ r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
+ ]
diff --git a/taskcluster/taskgraph/transforms/l10n.py b/taskcluster/taskgraph/transforms/l10n.py
new file mode 100644
index 000000000..42137b558
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/l10n.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Do transforms specific to l10n kind
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def mh_config_replace_project(config, jobs):
+ """ Replaces {project} in mh config entries with the current project """
+ # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
+ for job in jobs:
+ if not job['run'].get('using') == 'mozharness':
+ # Nothing to do, not mozharness
+ yield job
+ continue
+ job['run']['config'] = map(
+ lambda x: x.format(project=config.params['project']),
+ job['run']['config']
+ )
+ yield job
+
+
+@transforms.add
+def mh_options_replace_project(config, jobs):
+ """ Replaces {project} in mh option entries with the current project """
+ # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
+ for job in jobs:
+ if not job['run'].get('using') == 'mozharness':
+ # Nothing to do, not mozharness
+ yield job
+ continue
+ job['run']['options'] = map(
+ lambda x: x.format(project=config.params['project']),
+ job['run']['options']
+ )
+ yield job
diff --git a/taskcluster/taskgraph/transforms/marionette_harness.py b/taskcluster/taskgraph/transforms/marionette_harness.py
new file mode 100644
index 000000000..a24db470c
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/marionette_harness.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Set dynamic task description properties of the marionette-harness task.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def setup_task(config, tasks):
+ for task in tasks:
+ del task['name']
+ task['label'] = 'marionette-harness'
+ env = task['worker'].setdefault('env', {})
+ env.update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REF': config.params['head_rev'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ task['worker']['caches'] = [{
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-tc-vcs'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/.tc-vcs",
+ }]
+
+ yield task
diff --git a/taskcluster/taskgraph/transforms/task.py b/taskcluster/taskgraph/transforms/task.py
new file mode 100644
index 000000000..6e371e4ba
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -0,0 +1,648 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description and turn it into a TaskCluster
+task definition (along with attributes, label, etc.). The input to these
+transformations is generic to any kind of task, but abstracts away some of the
+complexities of worker implementations, scopes, and treeherder annotations.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import time
+
+from taskgraph.util.treeherder import split_symbol
+from taskgraph.transforms.base import (
+ validate_schema,
+ TransformSequence
+)
+from voluptuous import Schema, Any, Required, Optional, Extra
+
+from .gecko_v2_whitelist import JOB_NAME_WHITELIST, JOB_NAME_WHITELIST_ERROR
+
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+ basestring,
+ {Required('task-reference'): basestring})
+
+# A task description is a general description of a TaskCluster task
+task_description_schema = Schema({
+ # the label for this task
+ Required('label'): basestring,
+
+ # description of the task (for metadata)
+ Required('description'): basestring,
+
+ # attributes for this task
+ Optional('attributes'): {basestring: object},
+
+ # dependencies of this task, keyed by name; these are passed through
+ # verbatim and subject to the interpretation of the Task's get_dependencies
+ # method.
+ Optional('dependencies'): {basestring: object},
+
+ # expiration and deadline times, relative to task creation, with units
+ # (e.g., "14 days"). Defaults are set based on the project.
+ Optional('expires-after'): basestring,
+ Optional('deadline-after'): basestring,
+
+ # custom routes for this task; the default treeherder routes will be added
+ # automatically
+ Optional('routes'): [basestring],
+
+ # custom scopes for this task; any scopes required for the worker will be
+ # added automatically
+ Optional('scopes'): [basestring],
+
+ # custom "task.extra" content
+ Optional('extra'): {basestring: object},
+
+ # treeherder-related information; see
+ # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
+ # If not specified, no treeherder extra information or routes will be
+ # added to the task
+ Optional('treeherder'): {
+ # either a bare symbol, or "grp(sym)".
+ 'symbol': basestring,
+
+ # the job kind
+ 'kind': Any('build', 'test', 'other'),
+
+ # tier for this task
+ 'tier': int,
+
+ # task platform, in the form platform/collection, used to set
+ # treeherder.machine.platform and treeherder.collection or
+ # treeherder.labels
+ 'platform': basestring,
+
+ # treeherder environments (defaults to both staging and production)
+ Required('environments', default=['production', 'staging']): ['production', 'staging'],
+ },
+
+ # information for indexing this build so its artifacts can be discovered;
+ # if omitted, the build will not be indexed.
+ Optional('index'): {
+ # the name of the product this build produces
+ 'product': Any('firefox', 'mobile'),
+
+ # the names to use for this job in the TaskCluster index
+ 'job-name': Any(
+ # Assuming the job is named "normally", this is the v2 job name,
+ # and the v1 and buildbot routes will be determined appropriately.
+ basestring,
+
+ # otherwise, give separate names for each of the legacy index
+ # routes; if a name is omitted, no corresponding route will be
+ # created.
+ {
+ # the name as it appears in buildbot routes
+ Optional('buildbot'): basestring,
+ Required('gecko-v2'): basestring,
+ }
+ ),
+
+ # The rank that the task will receive in the TaskCluster
+        # index. A newly completed task supersedes the currently
+ # indexed task iff it has a higher rank. If unspecified,
+ # 'by-tier' behavior will be used.
+ 'rank': Any(
+ # Rank is equal the timestamp of the build_date for tier-1
+ # tasks, and zero for non-tier-1. This sorts tier-{2,3}
+ # builds below tier-1 in the index.
+ 'by-tier',
+
+ # Rank is given as an integer constant (e.g. zero to make
+ # sure a task is last in the index).
+ int,
+
+ # Rank is equal to the timestamp of the build_date. This
+ # option can be used to override the 'by-tier' behavior
+ # for non-tier-1 tasks.
+ 'build_date',
+ ),
+ },
+
+ # The `run_on_projects` attribute, defaulting to "all". This dictates the
+ # projects on which this task should be included in the target task set.
+ # See the attributes documentation for details.
+ Optional('run-on-projects'): [basestring],
+
+ # If the task can be coalesced, this is the name used in the coalesce key
+ # the project, etc. will be added automatically. Note that try (level 1)
+ # tasks are never coalesced
+ Optional('coalesce-name'): basestring,
+
+ # the provisioner-id/worker-type for the task. The following parameters will
+ # be substituted in this string:
+ # {level} -- the scm level of this push
+ 'worker-type': basestring,
+
+ # information specific to the worker implementation that will run this task
+ 'worker': Any({
+ Required('implementation'): Any('docker-worker', 'docker-engine'),
+
+ # For tasks that will run in docker-worker or docker-engine, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required('docker-image'): Any(
+ # a raw Docker image path (repo/image:tag)
+ basestring,
+ # an in-tree generated docker image (from `testing/docker/<name>`)
+ {'in-tree': basestring}
+ ),
+
+ # worker features that should be enabled
+ Required('relengapi-proxy', default=False): bool,
+ Required('chain-of-trust', default=False): bool,
+ Required('taskcluster-proxy', default=False): bool,
+ Required('allow-ptrace', default=False): bool,
+ Required('loopback-video', default=False): bool,
+ Required('loopback-audio', default=False): bool,
+
+ # caches to set up for the task
+ Optional('caches'): [{
+ # only one type is supported by any of the workers right now
+ 'type': 'persistent',
+
+ # name of the cache, allowing re-use by subsequent tasks naming the
+ # same cache
+ 'name': basestring,
+
+ # location in the task image where the cache will be mounted
+ 'mount-point': basestring,
+ }],
+
+ # artifacts to extract from the task image after completion
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ 'type': Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ 'path': basestring,
+
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ 'name': basestring,
+ }],
+
+ # environment variables
+ Required('env', default={}): {basestring: taskref_or_string},
+
+ # the command to run
+ 'command': [taskref_or_string],
+
+ # the maximum time to run, in seconds
+ 'max-run-time': int,
+
+ # the exit status code that indicates the task should be retried
+ Optional('retry-exit-status'): int,
+
+ }, {
+ Required('implementation'): 'generic-worker',
+
+ # command is a list of commands to run, sequentially
+ 'command': [taskref_or_string],
+
+ # artifacts to extract from the task image after completion; note that artifacts
+ # for the generic worker cannot have names
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ 'type': Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ 'path': basestring,
+ }],
+
+ # directories and/or files to be mounted
+ Optional('mounts'): [{
+ # a unique name for the cache volume
+ 'cache-name': basestring,
+
+ # task image path for the cache
+ 'path': basestring,
+ }],
+
+ # environment variables
+ Required('env', default={}): {basestring: taskref_or_string},
+
+ # the maximum time to run, in seconds
+ 'max-run-time': int,
+
+ # os user groups for test task workers
+ Optional('os-groups', default=[]): [basestring],
+ }, {
+ Required('implementation'): 'buildbot-bridge',
+
+ # see
+ # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
+ 'buildername': basestring,
+ 'sourcestamp': {
+ 'branch': basestring,
+ Optional('revision'): basestring,
+ Optional('repository'): basestring,
+ Optional('project'): basestring,
+ },
+ 'properties': {
+ 'product': basestring,
+ Extra: basestring, # additional properties are allowed
+ },
+ }, {
+ 'implementation': 'macosx-engine',
+
+ # A link for an executable to download
+ Optional('link'): basestring,
+
+ # the command to run
+ Required('command'): [taskref_or_string],
+
+ # environment variables
+ Optional('env'): {basestring: taskref_or_string},
+
+ # artifacts to extract from the task image after completion
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ Required('type'): Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ Required('path'): basestring,
+
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ Required('name'): basestring,
+ }],
+ }),
+
+ # The "when" section contains descriptions of the circumstances
+ # under which this task can be "optimized", that is, left out of the
+ # task graph because it is unnecessary.
+ Optional('when'): Any({
+ # This task only needs to be run if a file matching one of the given
+ # patterns has changed in the push. The patterns use the mozpack
+ # match function (python/mozbuild/mozpack/path.py).
+ Optional('files-changed'): [basestring],
+ }),
+})
+
+GROUP_NAMES = {
+ 'tc': 'Executed by TaskCluster',
+ 'tc-e10s': 'Executed by TaskCluster with e10s',
+ 'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
+ 'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
+ 'tc-Fxfn-r': 'Firefox functional tests (remote) executed by TaskCluster',
+ 'tc-Fxfn-r-e10s': 'Firefox functional tests (remote) executed by TaskCluster with e10s',
+ 'tc-M': 'Mochitests executed by TaskCluster',
+ 'tc-M-e10s': 'Mochitests executed by TaskCluster with e10s',
+ 'tc-R': 'Reftests executed by TaskCluster',
+ 'tc-R-e10s': 'Reftests executed by TaskCluster with e10s',
+ 'tc-VP': 'VideoPuppeteer tests executed by TaskCluster',
+ 'tc-W': 'Web platform tests executed by TaskCluster',
+ 'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
+ 'tc-X': 'Xpcshell tests executed by TaskCluster',
+ 'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
+ 'Aries': 'Aries Device Image',
+ 'Nexus 5-L': 'Nexus 5-L Device Image',
+ 'Cc': 'Toolchain builds',
+ 'SM-tc': 'Spidermonkey builds',
+}
+UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
+
+BUILDBOT_ROUTE_TEMPLATES = [
+ "index.buildbot.branches.{project}.{job-name-buildbot}",
+ "index.buildbot.revisions.{head_rev}.{project}.{job-name-buildbot}",
+]
+
+V2_ROUTE_TEMPLATES = [
+ "index.gecko.v2.{project}.latest.{product}.{job-name-gecko-v2}",
+ "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name-gecko-v2}",
+ "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name-gecko-v2}",
+]
+
+# the roots of the treeherder routes, keyed by treeherder environment
+TREEHERDER_ROUTE_ROOTS = {
+ 'production': 'tc-treeherder',
+ 'staging': 'tc-treeherder-stage',
+}
+
+COALESCE_KEY = 'builds.{project}.{name}'
+
+# define a collection of payload builders, depending on the worker implementation
+payload_builders = {}
+
+
+def payload_builder(name):
+ def wrap(func):
+ payload_builders[name] = func
+ return func
+ return wrap
+
+
@payload_builder('docker-worker')
def build_docker_worker_payload(config, task, task_def):
    """Build task_def['payload'] for tasks running on docker-worker, appending
    any feature/capability/cache scopes the payload requires to
    task_def['scopes']."""
    worker = task['worker']

    image = worker['docker-image']
    if isinstance(image, dict):
        # an in-tree image: add a dependency on the image-building task and
        # reference the image artifact it produces
        docker_image_task = 'build-docker-image-' + image['in-tree']
        task.setdefault('dependencies', {})['docker-image'] = docker_image_task
        image = {
            "path": "public/image.tar.zst",
            "taskId": {"task-reference": "<docker-image>"},
            "type": "task-image",
        }

    features = {}

    if worker.get('relengapi-proxy'):
        features['relengAPIProxy'] = True

    if worker.get('taskcluster-proxy'):
        features['taskclusterProxy'] = True

    if worker.get('allow-ptrace'):
        # ptrace is gated behind an explicit scope
        features['allowPtrace'] = True
        task_def['scopes'].append('docker-worker:feature:allowPtrace')

    if worker.get('chain-of-trust'):
        features['chainOfTrust'] = True

    capabilities = {}

    # each requested loopback device needs both a capability and a scope
    for lo in 'audio', 'video':
        if worker.get('loopback-' + lo):
            capitalized = 'loopback' + lo.capitalize()
            devices = capabilities.setdefault('devices', {})
            devices[capitalized] = True
            task_def['scopes'].append('docker-worker:capability:device:' + capitalized)

    task_def['payload'] = payload = {
        'command': worker['command'],
        'image': image,
        'env': worker['env'],
    }

    if 'max-run-time' in worker:
        payload['maxRunTime'] = worker['max-run-time']

    if 'retry-exit-status' in worker:
        payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}

    if 'artifacts' in worker:
        artifacts = {}
        for artifact in worker['artifacts']:
            artifacts[artifact['name']] = {
                'path': artifact['path'],
                'type': artifact['type'],
                'expires': task_def['expires'],  # always expire with the task
            }
        payload['artifacts'] = artifacts

    if 'caches' in worker:
        # each cache mount also needs a matching scope
        caches = {}
        for cache in worker['caches']:
            caches[cache['name']] = cache['mount-point']
            task_def['scopes'].append('docker-worker:cache:' + cache['name'])
        payload['cache'] = caches

    if features:
        payload['features'] = features
    if capabilities:
        payload['capabilities'] = capabilities

    # coalesce / superseding; only applied above level 1
    if 'coalesce-name' in task and int(config.params['level']) > 1:
        key = COALESCE_KEY.format(
            project=config.params['project'],
            name=task['coalesce-name'])
        payload['supersederUrl'] = "https://coalesce.mozilla-releng.net/v1/list/" + key
+
+
@payload_builder('generic-worker')
def build_generic_worker_payload(config, task, task_def):
    """Build task_def['payload'] for tasks running on generic-worker."""
    worker = task['worker']

    # artifacts always expire with the task itself
    artifacts = [{
        'path': artifact['path'],
        'type': artifact['type'],
        'expires': task_def['expires'],
    } for artifact in worker['artifacts']]

    mounts = [{
        'cacheName': mount['cache-name'],
        'directory': mount['path'],
    } for mount in worker.get('mounts', [])]

    task_def['payload'] = {
        'command': worker['command'],
        'artifacts': artifacts,
        'env': worker.get('env', {}),
        'mounts': mounts,
        'maxRunTime': worker['max-run-time'],
        'osGroups': worker.get('os-groups', []),
    }

    # generic-worker has no equivalent of docker-worker's onExitStatus
    if 'retry-exit-status' in worker:
        raise Exception("retry-exit-status not supported in generic-worker")
+
+
@payload_builder('macosx-engine')
def build_macosx_engine_payload(config, task, task_def):
    """Build task_def['payload'] for tasks running on the macosx engine."""
    worker = task['worker']

    # Use a list comprehension rather than map(): under Python 3, map()
    # returns a lazy iterator, which would not serialize as a JSON list.
    # (Under Python 2 this produces the exact same list as before.)
    artifacts = [{
        'name': artifact['name'],
        'path': artifact['path'],
        'type': artifact['type'],
        'expires': task_def['expires'],  # always expire with the task
    } for artifact in worker['artifacts']]

    task_def['payload'] = {
        'link': worker['link'],
        'command': worker['command'],
        'env': worker['env'],
        'artifacts': artifacts,
    }
+
transforms = TransformSequence()


@transforms.add
def validate(config, tasks):
    """Validate each task against the task description schema."""
    for task in tasks:
        error_prefix = "In task {!r}:".format(task.get('label', '?no-label?'))
        yield validate_schema(task_description_schema, task, error_prefix)
+
+
@transforms.add
def add_index_routes(config, tasks):
    """Translate each task's 'index' section into concrete index routes and
    set task.extra.index.rank, then remove the 'index' section."""
    for task in tasks:
        index = task.get('index')
        routes = task.setdefault('routes', [])

        if not index:
            yield task
            continue

        job_name = index['job-name']
        # unpack the v2 name to v1 and buildbot names
        if isinstance(job_name, basestring):
            base_name, type_name = job_name.rsplit('-', 1)
            job_name = {
                'buildbot': base_name,
                'gecko-v2': '{}-{}'.format(base_name, type_name),
            }

        if job_name['gecko-v2'] not in JOB_NAME_WHITELIST:
            raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name['gecko-v2']))

        # build the substitution dict used to fill in the route templates
        subs = config.params.copy()
        for n in job_name:
            subs['job-name-' + n] = job_name[n]
        subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
                                                time.gmtime(config.params['build_date']))
        subs['product'] = index['product']

        if 'buildbot' in job_name:
            for tpl in BUILDBOT_ROUTE_TEMPLATES:
                routes.append(tpl.format(**subs))
        if 'gecko-v2' in job_name:
            for tpl in V2_ROUTE_TEMPLATES:
                routes.append(tpl.format(**subs))

        # The default behavior is to rank tasks according to their tier
        extra_index = task.setdefault('extra', {}).setdefault('index', {})
        rank = index.get('rank', 'by-tier')

        if rank == 'by-tier':
            # rank is zero for non-tier-1 tasks and based on pushid for others;
            # this sorts tier-{2,3} builds below tier-1 in the index
            tier = task.get('treeherder', {}).get('tier', 3)
            extra_index['rank'] = 0 if tier > 1 else int(config.params['build_date'])
        elif rank == 'build_date':
            extra_index['rank'] = int(config.params['build_date'])
        else:
            # any other value is taken as a literal rank
            extra_index['rank'] = rank

        del task['index']
        yield task
+
+
@transforms.add
def build_task(config, tasks):
    """Convert each task description into a TaskCluster task definition,
    yielding {label, task, dependencies, attributes, when} dicts."""
    for task in tasks:
        # worker-type may contain a {level} placeholder, filled in here
        worker_type = task['worker-type'].format(level=str(config.params['level']))
        provisioner_id, worker_type = worker_type.split('/', 1)

        routes = task.get('routes', [])
        scopes = task.get('scopes', [])

        # set up extra
        extra = task.get('extra', {})
        task_th = task.get('treeherder')
        if task_th:
            extra['treeherderEnv'] = task_th['environments']

            treeherder = extra.setdefault('treeherder', {})

            machine_platform, collection = task_th['platform'].split('/', 1)
            treeherder['machine'] = {'platform': machine_platform}
            treeherder['collection'] = {collection: True}

            # a grouped symbol must have a registered group name
            groupSymbol, symbol = split_symbol(task_th['symbol'])
            if groupSymbol != '?':
                treeherder['groupSymbol'] = groupSymbol
                if groupSymbol not in GROUP_NAMES:
                    raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
                treeherder['groupName'] = GROUP_NAMES[groupSymbol]
            treeherder['symbol'] = symbol
            treeherder['jobKind'] = task_th['kind']
            treeherder['tier'] = task_th['tier']

            # announce the task to treeherder in each configured environment
            routes.extend([
                '{}.v2.{}.{}.{}'.format(TREEHERDER_ROUTE_ROOTS[env],
                                        config.params['project'],
                                        config.params['head_rev'],
                                        config.params['pushlog_id'])
                for env in task_th['environments']
            ])

        if 'expires-after' not in task:
            task['expires-after'] = '28 days' if config.params['project'] == 'try' else '1 year'

        if 'deadline-after' not in task:
            task['deadline-after'] = '1 day'

        # coalescing route; only applied above level 1
        if 'coalesce-name' in task and int(config.params['level']) > 1:
            key = COALESCE_KEY.format(
                project=config.params['project'],
                name=task['coalesce-name'])
            routes.append('coalesce.v1.' + key)

        task_def = {
            'provisionerId': provisioner_id,
            'workerType': worker_type,
            'routes': routes,
            'created': {'relative-datestamp': '0 seconds'},
            'deadline': {'relative-datestamp': task['deadline-after']},
            'expires': {'relative-datestamp': task['expires-after']},
            'scopes': scopes,
            'metadata': {
                'description': task['description'],
                'name': task['label'],
                'owner': config.params['owner'],
                'source': '{}/file/{}/{}'.format(
                    config.params['head_repository'],
                    config.params['head_rev'],
                    config.path),
            },
            'extra': extra,
            'tags': {'createdForUser': config.params['owner']},
        }

        # add the payload and adjust anything else as required (e.g., scopes)
        payload_builders[task['worker']['implementation']](config, task, task_def)

        attributes = task.get('attributes', {})
        attributes['run_on_projects'] = task.get('run-on-projects', ['all'])

        yield {
            'label': task['label'],
            'task': task_def,
            'dependencies': task.get('dependencies', {}),
            'attributes': attributes,
            'when': task.get('when', {}),
        }
+
+
# Check that the v2 route templates match those used by Mozharness. This can
# go away once Mozharness builds are no longer performed in Buildbot, and the
# Mozharness code referencing routes.json is deleted.
def check_v2_routes():
    """Raise at import time if V2_ROUTE_TEMPLATES has drifted from
    Mozharness's routes.json."""
    with open("testing/mozharness/configs/routes.json", "rb") as f:
        mozharness_routes = json.load(f)['routes']

    # translate mozharness's variable names into the ones used here
    substitutions = [
        ('{index}', 'index'),
        ('{build_product}', '{product}'),
        ('{build_name}-{build_type}', '{job-name-gecko-v2}'),
        ('{year}.{month}.{day}.{pushdate}', '{build_date_long}'),
    ]
    for mh_name, tg_name in substitutions:
        mozharness_routes = [route.replace(mh_name, tg_name)
                             for route in mozharness_routes]

    if sorted(mozharness_routes) != sorted(V2_ROUTE_TEMPLATES):
        raise Exception("V2_ROUTE_TEMPLATES does not match Mozharness's routes.json: "
                        "%s vs %s" % (V2_ROUTE_TEMPLATES, mozharness_routes))

check_v2_routes()
diff --git a/taskcluster/taskgraph/transforms/tests/__init__.py b/taskcluster/taskgraph/transforms/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/__init__.py
diff --git a/taskcluster/taskgraph/transforms/tests/all_kinds.py b/taskcluster/taskgraph/transforms/tests/all_kinds.py
new file mode 100644
index 000000000..f2aa1f841
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/all_kinds.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Changes here apply to all tests, regardless of kind.
+
+This is a great place for:
+
+ * Applying rules based on platform, project, etc. that should span kinds
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.util.treeherder import split_symbol, join_symbol
+from taskgraph.transforms.base import TransformSequence, get_keyed_by
+
+import copy
+
+
transforms = TransformSequence()


@transforms.add
def set_worker_implementation(config, tests):
    """Set the worker implementation based on the test platform."""
    for test in tests:
        platform = test['test-platform']
        if platform.startswith('win'):
            impl = 'generic-worker'
        elif platform.startswith('macosx'):
            impl = 'macosx-engine'
        else:
            impl = 'docker-worker'
        test['worker-implementation'] = impl
        yield test
+
+
@transforms.add
def set_tier(config, tests):
    """Set the tier based on policy for all test descriptions that do not
    specify a tier otherwise."""
    tier1_platforms = ('linux64/debug',
                       'linux64-asan/opt',
                       'android-4.3-arm7-api-15/debug',
                       'android-x86/opt')
    for test in tests:
        # only override if not set for the test
        if 'tier' not in test:
            test['tier'] = 1 if test['test-platform'] in tier1_platforms else 2
        yield test
+
+
@transforms.add
def set_expires_after(config, tests):
    """Try jobs expire after 2 weeks; everything else lasts 1 year. This helps
    keep storage costs low."""
    is_try = config.params['project'] == 'try'
    for test in tests:
        if 'expires-after' not in test:
            test['expires-after'] = "14 days" if is_try else "1 year"
        yield test
+
+
@transforms.add
def set_download_symbols(config, tests):
    """In general, we download symbols immediately for debug builds, but only
    on demand for everything else. ASAN builds shouldn't download
    symbols since they don't produce symbol zips; see bug 1283879."""
    for test in tests:
        if test['test-platform'].split('/')[-1] == 'debug':
            test['mozharness']['download-symbols'] = True
        elif test['build-platform'] == 'linux64-asan/opt':
            # no symbol downloads at all for ASAN
            test['mozharness'].pop('download-symbols', None)
        else:
            test['mozharness']['download-symbols'] = 'ondemand'
        yield test
+
+
@transforms.add
def resolve_keyed_by(config, tests):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        'instance-size',
        'max-run-time',
        'chunks',
        'e10s',
        'suite',
        'run-on-projects',
    ]
    for test in tests:
        name = test['test-name']
        for field in fields:
            test[field] = get_keyed_by(item=test, field=field, item_name=name)
        # the mozharness section has two keyed-by subfields
        for subfield in ('config', 'extra-options'):
            test['mozharness'][subfield] = get_keyed_by(item=test,
                                                        field='mozharness',
                                                        subfield=subfield,
                                                        item_name=name)
        yield test
+
+
@transforms.add
def split_chunks(config, tests):
    """Based on the 'chunks' key, split tests up into chunks by duplicating
    them and assigning 'this-chunk' appropriately and updating the treeherder
    symbol."""
    for test in tests:
        total = test['chunks']
        if total == 1:
            test['this-chunk'] = 1
            yield test
            continue

        for chunk_number in range(1, total + 1):
            # copy the test and update with the chunk number
            chunked = copy.deepcopy(test)
            chunked['this-chunk'] = chunk_number

            # append the chunk number to the treeherder symbol
            group, symbol = split_symbol(chunked['treeherder-symbol'])
            chunked['treeherder-symbol'] = join_symbol(group,
                                                       symbol + str(chunk_number))

            yield chunked
+
+
@transforms.add
def set_retry_exit_status(config, tests):
    """Set the retry exit status to TBPL_RETRY, the value returned by mozharness
    scripts to indicate a transient failure that should be retried."""
    TBPL_RETRY = 4
    for test in tests:
        test['retry-exit-status'] = TBPL_RETRY
        yield test
diff --git a/taskcluster/taskgraph/transforms/tests/android_test.py b/taskcluster/taskgraph/transforms/tests/android_test.py
new file mode 100644
index 000000000..7c13b16f5
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/android_test.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms are specific to the android-test kind, and apply defaults to
+the test descriptions appropriate to that kind.
+
+Both the input to and output from these transforms must conform to
+`taskgraph.transforms.tests.test:test_schema`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+from taskgraph.transforms.base import TransformSequence
+
transforms = TransformSequence()


@transforms.add
def set_defaults(config, tests):
    """Apply mozharness defaults common to every android test task."""
    for test in tests:
        mozharness = test['mozharness']
        # all Android test tasks download internal objects from tooltool
        mozharness['tooltool-downloads'] = True
        mozharness['build-artifact-name'] = 'public/build/target.apk'
        mozharness['actions'] = ['get-secrets']
        yield test
+
+
@transforms.add
def set_treeherder_machine_platform(config, tests):
    """Set the appropriate task.extra.treeherder.machine.platform"""
    # The build names for these build platforms have partially evolved over
    # the years. This is temporary until we can clean up the handling of
    # platforms.
    translation = {
        'android-api-15/debug': 'android-4-3-armv7-api15/debug',
        'android-api-15/opt': 'android-4-3-armv7-api15/opt',
        'android-x86/opt': 'android-4-2-x86/opt',
    }
    for test in tests:
        platform = test['build-platform']
        # fall back to the build platform itself when no translation applies
        test['treeherder-machine-platform'] = translation.get(platform, platform)
        yield test
diff --git a/taskcluster/taskgraph/transforms/tests/desktop_test.py b/taskcluster/taskgraph/transforms/tests/desktop_test.py
new file mode 100644
index 000000000..44a907903
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/desktop_test.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms are specific to the desktop-test kind, and apply defaults to
+the test descriptions appropriate to that kind.
+
+Both the input to and output from these transforms must conform to
+`taskgraph.transforms.tests.test:test_schema`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+from taskgraph.transforms.base import TransformSequence, get_keyed_by
+from taskgraph.util.treeherder import split_symbol, join_symbol
+
+import copy
+
transforms = TransformSequence()


@transforms.add
def set_defaults(config, tests):
    """Apply defaults common to every desktop test task."""
    for test in tests:
        # macosx builds produce a dmg; everything else a tarball
        is_macosx = test['build-platform'].startswith('macosx')
        target = 'target.dmg' if is_macosx else 'target.tar.bz2'
        test['mozharness']['build-artifact-name'] = 'public/build/' + target
        # all desktop tests want to run the bits that require node
        test['mozharness']['set-moz-node-path'] = True
        yield test
+
+
@transforms.add
def set_treeherder_machine_platform(config, tests):
    """Set the appropriate task.extra.treeherder.machine.platform"""
    # Linux64 build platforms for asan and pgo are specified differently to
    # treeherder. This is temporary until we can clean up the handling of
    # platforms
    translation = {
        'linux64-asan/opt': 'linux64/asan',
        'linux64-pgo/opt': 'linux64/pgo',
        'macosx64/debug': 'osx-10-10/debug',
        'macosx64/opt': 'osx-10-10/opt',
    }
    for test in tests:
        # note: unlike the android variant, the fallback here is the *test*
        # platform, not the build platform
        test['treeherder-machine-platform'] = translation.get(
            test['build-platform'], test['test-platform'])
        yield test
+
+
@transforms.add
def set_asan_docker_image(config, tests):
    """Set the appropriate task.extra.treeherder.docker-image"""
    # Linux64-asan has many leaks with running mochitest-media jobs
    # on Ubuntu 16.04, please remove this when bug 1289209 is resolved
    for test in tests:
        is_media_suite = test['suite'] == 'mochitest/mochitest-media'
        is_asan_build = test['build-platform'] == 'linux64-asan/opt'
        if is_media_suite and is_asan_build:
            test['docker-image'] = {"in-tree": "desktop-test"}
        yield test
+
+
@transforms.add
def split_e10s(config, tests):
    """Duplicate tests whose 'e10s' field resolves to 'both' into a non-e10s
    and an e10s variant; otherwise mark the single test as e10s or not."""
    for test in tests:
        e10s = get_keyed_by(item=test, field='e10s',
                            item_name=test['test-name'])
        test.setdefault('attributes', {})
        # start from the non-e10s shape; flipped below when e10s applies
        test['e10s'] = False
        test['attributes']['e10s'] = False

        if e10s == 'both':
            # yield the non-e10s variant, then rebind `test` to a deep copy
            # which becomes the e10s variant below
            yield test
            test = copy.deepcopy(test)
            e10s = True
        if e10s:
            test['test-name'] += '-e10s'
            test['e10s'] = True
            test['attributes']['e10s'] = True
            group, symbol = split_symbol(test['treeherder-symbol'])
            if group != '?':
                group += '-e10s'
            test['treeherder-symbol'] = join_symbol(group, symbol)
            test['mozharness'].setdefault('extra-options', []).append('--e10s')
        yield test
+
+
@transforms.add
def allow_software_gl_layers(config, tests):
    """Pass --allow-software-gl-layers to mozharness where it applies."""
    for test in tests:
        # since this value defaults to true, but is not applicable on windows,
        # it's overridden for that platform here.
        if test['test-platform'].startswith('win'):
            allow = False
        else:
            allow = get_keyed_by(item=test, field='allow-software-gl-layers',
                                 item_name=test['test-name'])

        if allow:
            assert test['instance-size'] != 'legacy',\
                'Software GL layers on a legacy instance is disallowed (bug 1296086).'

            # This should be set always once bug 1296086 is resolved.
            extra_options = test['mozharness'].setdefault('extra-options', [])
            extra_options.append("--allow-software-gl-layers")

        yield test
+
+
@transforms.add
def add_os_groups(config, tests):
    """On Windows, copy any keyed-by os-groups into the test description."""
    for test in tests:
        if test['test-platform'].startswith('win'):
            groups = get_keyed_by(item=test, field='os-groups',
                                  item_name=test['test-name'])
            if groups:
                test['os-groups'] = groups
        yield test
diff --git a/taskcluster/taskgraph/transforms/tests/make_task_description.py b/taskcluster/taskgraph/transforms/tests/make_task_description.py
new file mode 100644
index 000000000..fc3f94893
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/make_task_description.py
@@ -0,0 +1,445 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms construct a task description to run the given test, based on a
+test description. The implementation here is shared among all test kinds, but
+contains specific support for how we run tests in Gecko (via mozharness,
+invoked in particular ways).
+
+This is a good place to translate a test-description option such as
+`single-core: true` to the implementation of that option in a task description
+(worker options, mozharness commandline, environment variables, etc.)
+
+The test description should be fully formed by the time it reaches these
+transforms, and these transforms should not embody any specific knowledge about
+what should run where. This is the wrong place for special-casing platforms,
+for example - use `all_kinds.py` instead.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
+
+import logging
+import os.path
+
# URL from which a dependent task's artifact can be fetched
ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
WORKER_TYPE = {
    # default worker types keyed by instance-size
    'large': 'aws-provisioner-v1/gecko-t-linux-large',
    'xlarge': 'aws-provisioner-v1/gecko-t-linux-xlarge',
    'legacy': 'aws-provisioner-v1/gecko-t-linux-medium',
    'default': 'aws-provisioner-v1/gecko-t-linux-large',
    # windows worker types keyed by test-platform
    'windows7-32-vm': 'aws-provisioner-v1/gecko-t-win7-32',
    'windows7-32': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
    'windows10-64-vm': 'aws-provisioner-v1/gecko-t-win10-64',
    'windows10-64': 'aws-provisioner-v1/gecko-t-win10-64-gpu'
}

# artifact locations with relative paths; used by the macosx-engine setup
ARTIFACTS = [
    # (artifact name prefix, in-image path)
    ("public/logs/", "build/upload/logs/"),
    ("public/test", "artifacts/"),
    ("public/test_info/", "build/blobber_upload_dir/"),
]

logger = logging.getLogger(__name__)

transforms = TransformSequence()
+
+
@transforms.add
def make_task_description(config, tests):
    """Convert *test* descriptions to *task* descriptions (input to
    taskgraph.transforms.task)"""

    for test in tests:
        # label: <kind>-<test-platform>-<test-name>[-<chunk>]
        label = '{}-{}-{}'.format(config.kind, test['test-platform'], test['test-name'])
        if test['chunks'] > 1:
            label += '-{}'.format(test['this-chunk'])

        build_label = test['build-label']

        unittest_try_name = test.get('unittest-try-name', test['test-name'])

        attr_build_platform, attr_build_type = test['build-platform'].split('/', 1)

        # 'suite' may be '<suite>/<flavor>'; without a flavor the suite name
        # doubles as the flavor
        suite = test['suite']
        if '/' in suite:
            suite, flavor = suite.split('/', 1)
        else:
            flavor = suite

        attributes = test.get('attributes', {})
        attributes.update({
            'build_platform': attr_build_platform,
            'build_type': attr_build_type,
            # only keep the first portion of the test platform
            'test_platform': test['test-platform'].split('/')[0],
            'test_chunk': str(test['this-chunk']),
            'unittest_suite': suite,
            'unittest_flavor': flavor,
            'unittest_try_name': unittest_try_name,
        })

        taskdesc = {}
        taskdesc['label'] = label
        taskdesc['description'] = test['description']
        taskdesc['attributes'] = attributes
        # every test task depends on the build task it tests
        taskdesc['dependencies'] = {'build': build_label}
        taskdesc['deadline-after'] = '1 day'
        taskdesc['expires-after'] = test['expires-after']
        taskdesc['routes'] = []
        taskdesc['run-on-projects'] = test.get('run-on-projects', ['all'])
        taskdesc['scopes'] = []
        taskdesc['extra'] = {
            'chunks': {
                'current': test['this-chunk'],
                'total': test['chunks'],
            },
            'suite': {
                'name': suite,
                'flavor': flavor,
            },
        }
        taskdesc['treeherder'] = {
            'symbol': test['treeherder-symbol'],
            'kind': 'test',
            'tier': test['tier'],
            'platform': test.get('treeherder-machine-platform', test['build-platform']),
        }

        # the remainder (the worker-type and worker) differs depending on the
        # worker implementation
        worker_setup_functions[test['worker-implementation']](config, test, taskdesc)

        # yield only the task description, discarding the test description
        yield taskdesc
+
+
# per-implementation worker setup functions, registered via the decorator below
worker_setup_functions = {}


def worker_setup_function(name):
    """Decorator: register the decorated function in ``worker_setup_functions``
    as the setup function for worker implementation `name`."""
    def register(func):
        worker_setup_functions[name] = func
        return func
    return register
+
+
@worker_setup_function("docker-engine")
@worker_setup_function("docker-worker")
def docker_worker_setup(config, test, taskdesc):
    """Fill in the worker-type and worker sections of the task description for
    tests that run under docker-worker (or docker-engine), including the full
    mozharness command line."""

    # like the module-level ARTIFACTS, but with absolute in-container paths
    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
        ("public/test", "/home/worker/artifacts/"),
        ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
    ]
    mozharness = test['mozharness']

    # artifacts of the build task this test depends on
    installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
    test_packages_url = ARTIFACT_URL.format('<build>',
                                            'public/build/target.test_packages.json')
    mozharness_url = ARTIFACT_URL.format('<build>',
                                         'public/build/mozharness.zip')

    taskdesc['worker-type'] = WORKER_TYPE[test['instance-size']]

    worker = taskdesc['worker'] = {}
    worker['implementation'] = test['worker-implementation']
    worker['docker-image'] = test['docker-image']

    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
    worker['loopback-video'] = test['loopback-video']
    worker['loopback-audio'] = test['loopback-audio']
    worker['max-run-time'] = test['max-run-time']
    worker['retry-exit-status'] = test['retry-exit-status']

    worker['artifacts'] = [{
        'name': prefix,
        'path': os.path.join('/home/worker/workspace', path),
        'type': 'directory',
    } for (prefix, path) in artifacts]

    # a persistent workspace cache, shared per level/project
    worker['caches'] = [{
        'type': 'persistent',
        'name': 'level-{}-{}-test-workspace'.format(
            config.params['level'], config.params['project']),
        'mount-point': "/home/worker/workspace",
    }]

    env = worker['env'] = {
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
        'NEED_PULSEAUDIO': 'true',
        'NEED_WINDOW_MANAGER': 'true',
    }

    if mozharness['set-moz-node-path']:
        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'

    if 'actions' in mozharness:
        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])

    if config.params['project'] == 'try':
        env['TRY_COMMIT_MSG'] = config.params['message']

    # handle some of the mozharness-specific options

    if mozharness['tooltool-downloads']:
        # internal tooltool downloads need the relengapi proxy, a cache,
        # and the download scopes
        worker['relengapi-proxy'] = True
        worker['caches'].append({
            'type': 'persistent',
            'name': 'tooltool-cache',
            'mount-point': '/home/worker/tooltool-cache',
        })
        taskdesc['scopes'].extend([
            'docker-worker:relengapi-proxy:tooltool.download.internal',
            'docker-worker:relengapi-proxy:tooltool.download.public',
        ])

    # assemble the command line
    command = [
        '/home/worker/bin/run-task',
        # The workspace cache/volume is default owned by root:root.
        '--chown', '/home/worker/workspace',
    ]

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    docker_worker_support_vcs_checkout(config, test, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test['checkout']:
        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
        env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
    else:
        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}

    command.extend([
        '--',
        '/home/worker/bin/test-linux.sh',
    ])

    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url},
    ])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            # append the chunk suffix to any --test-suite argument
            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        # pass booleans as 'true'/'false' strings on the command line
        download_symbols = mozharness['download-symbols']
        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)

    worker['command'] = command
+
+
def normpath(path):
    """Convert a forward-slash path into its Windows backslash form."""
    return '\\'.join(path.split('/'))
+
+
def get_firefox_version():
    """Return the Firefox version string from browser/config/version.txt."""
    with open('browser/config/version.txt', 'r') as fh:
        first_line = fh.readline()
    return first_line.strip()
+
+
@worker_setup_function('generic-worker')
def generic_worker_setup(config, test, taskdesc):
    """Fill in the worker-type and worker sections of the task description for
    tests that run under generic-worker (Windows), including the mozharness
    command sequence."""
    # mozharness log files and the blobber upload dir, uploaded as artifacts
    artifacts = [
        {
            'path': 'public\\logs\\localconfig.json',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_critical.log',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_error.log',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_fatal.log',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_info.log',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_raw.log',
            'type': 'file'
        },
        {
            'path': 'public\\logs\\log_warning.log',
            'type': 'file'
        },
        {
            'path': 'public\\test_info',
            'type': 'directory'
        }
    ]
    mozharness = test['mozharness']

    build_platform = taskdesc['attributes']['build_platform']
    test_platform = test['test-platform'].split('/')[0]

    # artifacts of the build task are named after the Firefox version
    target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform)

    installer_url = ARTIFACT_URL.format(
        '<build>', 'public/build/{}.zip'.format(target))
    test_packages_url = ARTIFACT_URL.format(
        '<build>', 'public/build/{}.test_packages.json'.format(target))
    mozharness_url = ARTIFACT_URL.format(
        '<build>', 'public/build/mozharness.zip')

    taskdesc['worker-type'] = WORKER_TYPE[test_platform]

    # each os-group needs a corresponding scope
    taskdesc['scopes'].extend(
        ['generic-worker:os-group:{}'.format(group) for group in test['os-groups']])

    worker = taskdesc['worker'] = {}
    worker['os-groups'] = test['os-groups']
    worker['implementation'] = test['worker-implementation']
    worker['max-run-time'] = test['max-run-time']
    worker['artifacts'] = artifacts

    env = worker['env'] = {
        # Bug 1306989
        'APPDATA': '%cd%\\AppData\\Roaming',
        'LOCALAPPDATA': '%cd%\\AppData\\Local',
        'TEMP': '%cd%\\AppData\\Local\\Temp',
        'TMP': '%cd%\\AppData\\Local\\Temp',
        'USERPROFILE': '%cd%',
    }

    # assemble the command line
    mh_command = [
        'c:\\mozilla-build\\python\\python.exe',
        '-u',
        'mozharness\\scripts\\' + normpath(mozharness['script'])
    ]
    for mh_config in mozharness['config']:
        mh_command.extend(['--cfg', 'mozharness\\configs\\' + normpath(mh_config)])
    mh_command.extend(mozharness.get('extra-options', []))
    if mozharness.get('no-read-buildbot-config'):
        mh_command.append('--no-read-buildbot-config')
    mh_command.extend(['--installer-url', installer_url])
    mh_command.extend(['--test-packages-url', test_packages_url])
    if mozharness.get('download-symbols'):
        # pass string values through; booleans become 'true'
        if isinstance(mozharness['download-symbols'], basestring):
            mh_command.extend(['--download-symbols', mozharness['download-symbols']])
        else:
            mh_command.extend(['--download-symbols', 'true'])

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            mh_command.append('--total-chunk={}'.format(test['chunks']))
            mh_command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            # append the chunk suffix to any --test-suite argument
            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(mh_command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    mh_command[i] += suffix

    # fetch and unpack mozharness, run it, then collect the artifacts
    worker['command'] = [
        'mkdir {} {}'.format(env['APPDATA'], env['TMP']),
        {'task-reference': 'c:\\mozilla-build\\wget\\wget.exe {}'.format(mozharness_url)},
        'c:\\mozilla-build\\info-zip\\unzip.exe mozharness.zip',
        {'task-reference': ' '.join(mh_command)},
        'xcopy build\\blobber_upload_dir public\\test_info /e /i',
        'copy /y logs\\*.* public\\logs\\'
    ]
+
+
@worker_setup_function("macosx-engine")
def macosx_engine_setup(config, test, taskdesc):
    """Fill in the worker-type and worker sections of the task description for
    tests that run on the macosx engine, including the test-macosx.sh
    command line."""
    mozharness = test['mozharness']

    # artifacts of the build task this test depends on
    installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
    test_packages_url = ARTIFACT_URL.format('<build>',
                                            'public/build/target.test_packages.json')
    mozharness_url = ARTIFACT_URL.format('<build>',
                                         'public/build/mozharness.zip')

    # for now we have only 10.10 machines
    taskdesc['worker-type'] = 'tc-worker-provisioner/gecko-t-osx-10-10'

    worker = taskdesc['worker'] = {}
    worker['implementation'] = test['worker-implementation']

    worker['artifacts'] = [{
        'name': prefix.rstrip('/'),
        'path': path.rstrip('/'),
        'type': 'directory',
    } for (prefix, path) in ARTIFACTS]

    worker['env'] = {
        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
        'GECKO_HEAD_REV': config.params['head_rev'],
        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
        'MOZHARNESS_SCRIPT': mozharness['script'],
        'MOZHARNESS_URL': {'task-reference': mozharness_url},
        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
    }

    # assemble the command line

    # the worker downloads this script and runs it as the command
    worker['link'] = '{}/raw-file/{}/taskcluster/scripts/tester/test-macosx.sh'.format(
        config.params['head_repository'], config.params['head_rev']
    )

    command = worker['command'] = ["./test-macosx.sh"]
    if mozharness.get('no-read-buildbot-config'):
        command.append("--no-read-buildbot-config")
    command.extend([
        {"task-reference": "--installer-url=" + installer_url},
        {"task-reference": "--test-packages-url=" + test_packages_url},
    ])
    if mozharness.get('include-blob-upload-branch'):
        command.append('--blob-upload-branch=' + config.params['project'])
    command.extend(mozharness.get('extra-options', []))

    # TODO: remove the need for run['chunked']
    if mozharness.get('chunked') or test['chunks'] > 1:
        # Implement mozharness['chunking-args'], modifying command in place
        if mozharness['chunking-args'] == 'this-chunk':
            command.append('--total-chunk={}'.format(test['chunks']))
            command.append('--this-chunk={}'.format(test['this-chunk']))
        elif mozharness['chunking-args'] == 'test-suite-suffix':
            # append the chunk suffix to any --test-suite argument
            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
            for i, c in enumerate(command):
                if isinstance(c, basestring) and c.startswith('--test-suite'):
                    command[i] += suffix

    if 'download-symbols' in mozharness:
        # pass booleans as 'true'/'false' strings on the command line
        download_symbols = mozharness['download-symbols']
        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
        command.append('--download-symbols=' + download_symbols)
diff --git a/taskcluster/taskgraph/transforms/tests/test_description.py b/taskcluster/taskgraph/transforms/tests/test_description.py
new file mode 100644
index 000000000..1365919fe
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/test_description.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This file defines the schema for tests -- the things in `tests.yml`. It should
+be run both before and after the kind-specific transforms, to ensure that the
+transforms do not generate invalid tests.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import validate_schema
+from voluptuous import (
+ Any,
+ Optional,
+ Required,
+ Schema,
+)
+
+
+# Schema for a test description
+#
+# *****WARNING*****
+#
+# This is a great place for baffling cruft to accumulate, and that makes
+# everyone move more slowly. Be considerate of your fellow hackers!
+# See the warnings in taskcluster/docs/how-tos.rst
+#
+# *****WARNING*****
+test_description_schema = Schema({
+    # description of the suite, for the task metadata
+    'description': basestring,
+
+    # test suite name, or <suite>/<flavor>
+    Required('suite'): Any(
+        basestring,
+        {'by-test-platform': {basestring: basestring}},
+    ),
+
+    # the name by which this test suite is addressed in try syntax; defaults to
+    # the test-name
+    Optional('unittest-try-name'): basestring,
+
+    # the symbol, or group(symbol), under which this task should appear in
+    # treeherder.
+    'treeherder-symbol': basestring,
+
+    # the value to place in task.extra.treeherder.machine.platform; ideally
+    # this is the same as build-platform, and that is the default, but in
+    # practice it's not always a match.
+    Optional('treeherder-machine-platform'): basestring,
+
+    # attributes to appear in the resulting task (later transforms will add the
+    # common attributes)
+    Optional('attributes'): {basestring: object},
+
+    # The `run_on_projects` attribute, defaulting to "all". This dictates the
+    # projects on which this task should be included in the target task set.
+    # See the attributes documentation for details.
+    Optional('run-on-projects', default=['all']): Any(
+        [basestring],
+        {'by-test-platform': {basestring: [basestring]}},
+    ),
+
+    # the sheriffing tier for this task (default: set based on test platform)
+    Optional('tier'): int,
+
+    # number of chunks to create for this task. This can be keyed by test
+    # platform by passing a dictionary in the `by-test-platform` key. If the
+    # test platform is not found, the key 'default' will be tried.
+    Required('chunks', default=1): Any(
+        int,
+        {'by-test-platform': {basestring: int}},
+    ),
+
+    # the time (with unit) after which this task is deleted; default depends on
+    # the branch (see below)
+    Optional('expires-after'): basestring,
+
+    # Whether to run this task with e10s (desktop-test only). If false, run
+    # without e10s; if true, run with e10s; if 'both', run one task with and
+    # one task without e10s. E10s tasks have "-e10s" appended to the test name
+    # and treeherder group.
+    Required('e10s', default='both'): Any(
+        bool, 'both',
+        {'by-test-platform': {basestring: Any(bool, 'both')}},
+    ),
+
+    # The EC2 instance size to run these tests on.
+    Required('instance-size', default='default'): Any(
+        Any('default', 'large', 'xlarge', 'legacy'),
+        {'by-test-platform': {basestring: Any('default', 'large', 'xlarge', 'legacy')}},
+    ),
+
+    # Whether the task requires loopback audio or video (whatever that may mean
+    # on the platform)
+    Required('loopback-audio', default=False): bool,
+    Required('loopback-video', default=False): bool,
+
+    # Whether the test can run using a software GL implementation on Linux
+    # using the GL compositor. May not be used with "legacy" sized instances
+    # due to poor LLVMPipe performance (bug 1296086).
+    Optional('allow-software-gl-layers', default=True): bool,
+
+    # The worker implementation for this test, as dictated by policy and by the
+    # test platform.
+    Optional('worker-implementation'): Any(
+        'docker-worker',
+        'macosx-engine',
+        'generic-worker',
+        # coming soon:
+        'docker-engine',
+        'buildbot-bridge',
+    ),
+
+    # For tasks that will run in docker-worker or docker-engine, this is the
+    # name of the docker image or in-tree docker image to run the task in. If
+    # in-tree, then a dependency will be created automatically. This is
+    # generally `desktop-test`, or an image closely resembling it.
+    Required('docker-image', default={'in-tree': 'desktop-test'}): Any(
+        # a raw Docker image path (repo/image:tag)
+        basestring,
+        # an in-tree generated docker image (from `testing/docker/<name>`)
+        {'in-tree': basestring}
+    ),
+
+    # seconds of runtime after which the task will be killed. Like 'chunks',
+    # this can be keyed by test platform.
+    Required('max-run-time', default=3600): Any(
+        int,
+        {'by-test-platform': {basestring: int}},
+    ),
+
+    # the exit status code that indicates the task should be retried
+    Optional('retry-exit-status'): int,
+
+    # Whether to perform a gecko checkout.
+    Required('checkout', default=False): bool,
+
+    # What to run
+    Required('mozharness'): Any({
+        # the mozharness script used to run this task
+        Required('script'): basestring,
+
+        # the config files required for the task
+        Required('config'): Any(
+            [basestring],
+            {'by-test-platform': {basestring: [basestring]}},
+        ),
+
+        # any additional actions to pass to the mozharness command
+        Optional('actions'): [basestring],
+
+        # additional command-line options for mozharness, beyond those
+        # automatically added
+        Required('extra-options', default=[]): Any(
+            [basestring],
+            {'by-test-platform': {basestring: [basestring]}},
+        ),
+
+        # the artifact name (including path) to test on the build task; this is
+        # generally set in a per-kind transformation
+        Optional('build-artifact-name'): basestring,
+
+        # If true, tooltool downloads will be enabled via relengAPIProxy.
+        Required('tooltool-downloads', default=False): bool,
+
+        # This mozharness script also runs in Buildbot and tries to read a
+        # buildbot config file, so tell it not to do so in TaskCluster
+        Required('no-read-buildbot-config', default=False): bool,
+
+        # Add --blob-upload-branch=<project> mozharness parameter
+        Optional('include-blob-upload-branch'): bool,
+
+        # The setting for --download-symbols (if omitted, the option will not
+        # be passed to mozharness)
+        Optional('download-symbols'): Any(True, 'ondemand'),
+
+        # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
+        # environment. This is more than just a helpful path setting -- it
+        # causes xpcshell tests to start additional servers, and runs
+        # additional tests.
+        Required('set-moz-node-path', default=False): bool,
+
+        # If true, include chunking information in the command even if the number
+        # of chunks is 1
+        Required('chunked', default=False): bool,
+
+        # The chunking argument format to use
+        Required('chunking-args', default='this-chunk'): Any(
+            # Use the usual --this-chunk/--total-chunk arguments
+            'this-chunk',
+            # Use --test-suite=<suite>-<chunk-suffix>; see chunk-suffix, below
+            'test-suite-suffix',
+        ),
+
+        # the string to append to the `--test-suite` argument when
+        # chunking-args = test-suite-suffix; "<CHUNK>" in this string will
+        # be replaced with the chunk number.
+        Optional('chunk-suffix'): basestring,
+    }),
+
+    # The current chunk; this is filled in by `all_kinds.py`
+    Optional('this-chunk'): int,
+
+    # OS user groups for test task workers; the required scopes will be
+    # added automatically.
+    Optional('os-groups', default=[]): Any(
+        [basestring],
+        # todo: create a dedicated elevated worker group and name here
+        {'by-test-platform': {basestring: [basestring]}},
+    ),
+
+    # -- values supplied by the task-generation infrastructure
+
+    # the platform of the build this task is testing
+    'build-platform': basestring,
+
+    # the label of the build task generating the materials to test
+    'build-label': basestring,
+
+    # the platform on which the tests will run
+    'test-platform': basestring,
+
+    # the name of the test (the key in tests.yml)
+    'test-name': basestring,
+
+}, required=True)
+
+
+# TODO: split into a pre-transform `validate` and a stricter post-transform
+def validate(config, tests):
+    for test in tests:
+        yield validate_schema(test_description_schema, test,
+                              "In test {!r}:".format(test['test-name']))
diff --git a/taskcluster/taskgraph/transforms/upload_symbols.py b/taskcluster/taskgraph/transforms/upload_symbols.py
new file mode 100644
index 000000000..9b4884a97
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/upload_symbols.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-symbols task description template,
+ taskcluster/ci/upload-symbols/job-template.yml
+into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def fill_template(config, tasks):
+    for task in tasks:
+        # Fill out the dynamic fields (label, dependency, repo env vars)
+        task['label'] = task['build-label'] + '-upload-symbols'
+        task['dependencies'] = {'build': task['build-label']}
+        task['worker']['env']['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
+        task['worker']['env']['GECKO_HEAD_REV'] = config.params['head_rev']
+
+        build_platform, build_type = task['build-platform'].split('/')
+        attributes = task.setdefault('attributes', {})
+        attributes['build_platform'] = build_platform
+        attributes['build_type'] = build_type
+
+        # drop template-only keys that are not valid task-description fields
+        del task['build-label']
+        del task['build-platform']
+
+        yield task