author    Matt A. Tobin <mattatobin@localhost.localdomain>    2018-02-02 04:16:08 -0500
committer Matt A. Tobin <mattatobin@localhost.localdomain>    2018-02-02 04:16:08 -0500
commit    5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree      10027f336435511475e392454359edea8e25895d /taskcluster/taskgraph
parent    49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'taskcluster/taskgraph')
-rw-r--r--  taskcluster/taskgraph/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/action.py  68
-rw-r--r--  taskcluster/taskgraph/action.yml  74
-rw-r--r--  taskcluster/taskgraph/create.py  122
-rw-r--r--  taskcluster/taskgraph/decision.py  181
-rw-r--r--  taskcluster/taskgraph/docker.py  132
-rw-r--r--  taskcluster/taskgraph/files_changed.py  65
-rw-r--r--  taskcluster/taskgraph/generator.py  218
-rw-r--r--  taskcluster/taskgraph/graph.py  117
-rw-r--r--  taskcluster/taskgraph/optimize.py  156
-rw-r--r--  taskcluster/taskgraph/parameters.py  72
-rw-r--r--  taskcluster/taskgraph/target_tasks.py  121
-rw-r--r--  taskcluster/taskgraph/task/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/task/base.py  108
-rw-r--r--  taskcluster/taskgraph/task/docker_image.py  130
-rw-r--r--  taskcluster/taskgraph/task/post_build.py  53
-rw-r--r--  taskcluster/taskgraph/task/signing.py  64
-rw-r--r--  taskcluster/taskgraph/task/test.py  112
-rw-r--r--  taskcluster/taskgraph/task/transform.py  109
-rw-r--r--  taskcluster/taskgraph/taskgraph.py  82
-rw-r--r--  taskcluster/taskgraph/test/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/test/automationrelevance.json  425
-rw-r--r--  taskcluster/taskgraph/test/test_create.py  76
-rw-r--r--  taskcluster/taskgraph/test/test_decision.py  78
-rw-r--r--  taskcluster/taskgraph/test/test_files_changed.py  73
-rw-r--r--  taskcluster/taskgraph/test/test_generator.py  129
-rw-r--r--  taskcluster/taskgraph/test/test_graph.py  157
-rw-r--r--  taskcluster/taskgraph/test/test_optimize.py  256
-rw-r--r--  taskcluster/taskgraph/test/test_parameters.py  62
-rw-r--r--  taskcluster/taskgraph/test/test_target_tasks.py  81
-rw-r--r--  taskcluster/taskgraph/test/test_task_docker_image.py  35
-rw-r--r--  taskcluster/taskgraph/test/test_taskgraph.py  54
-rw-r--r--  taskcluster/taskgraph/test/test_transforms_base.py  143
-rw-r--r--  taskcluster/taskgraph/test/test_try_option_syntax.py  274
-rw-r--r--  taskcluster/taskgraph/test/test_util_attributes.py  45
-rw-r--r--  taskcluster/taskgraph/test/test_util_docker.py  194
-rw-r--r--  taskcluster/taskgraph/test/test_util_python_path.py  31
-rwxr-xr-x  taskcluster/taskgraph/test/test_util_templates.py  232
-rwxr-xr-x  taskcluster/taskgraph/test/test_util_time.py  57
-rw-r--r--  taskcluster/taskgraph/test/test_util_treeherder.py  23
-rw-r--r--  taskcluster/taskgraph/test/test_util_yaml.py  23
-rw-r--r--  taskcluster/taskgraph/test/util.py  24
-rw-r--r--  taskcluster/taskgraph/transforms/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/transforms/android_stuff.py  46
-rw-r--r--  taskcluster/taskgraph/transforms/base.py  126
-rw-r--r--  taskcluster/taskgraph/transforms/build.py  31
-rw-r--r--  taskcluster/taskgraph/transforms/build_attrs.py  33
-rw-r--r--  taskcluster/taskgraph/transforms/gecko_v2_whitelist.py  77
-rw-r--r--  taskcluster/taskgraph/transforms/job/__init__.py  164
-rw-r--r--  taskcluster/taskgraph/transforms/job/common.py  108
-rw-r--r--  taskcluster/taskgraph/transforms/job/hazard.py  91
-rw-r--r--  taskcluster/taskgraph/transforms/job/mach.py  30
-rw-r--r--  taskcluster/taskgraph/transforms/job/mozharness.py  226
-rw-r--r--  taskcluster/taskgraph/transforms/job/run_task.py  59
-rw-r--r--  taskcluster/taskgraph/transforms/job/spidermonkey.py  86
-rw-r--r--  taskcluster/taskgraph/transforms/job/toolchain.py  115
-rw-r--r--  taskcluster/taskgraph/transforms/l10n.py  44
-rw-r--r--  taskcluster/taskgraph/transforms/marionette_harness.py  37
-rw-r--r--  taskcluster/taskgraph/transforms/task.py  648
-rw-r--r--  taskcluster/taskgraph/transforms/tests/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/transforms/tests/all_kinds.py  137
-rw-r--r--  taskcluster/taskgraph/transforms/tests/android_test.py  42
-rw-r--r--  taskcluster/taskgraph/transforms/tests/desktop_test.py  118
-rw-r--r--  taskcluster/taskgraph/transforms/tests/make_task_description.py  445
-rw-r--r--  taskcluster/taskgraph/transforms/tests/test_description.py  235
-rw-r--r--  taskcluster/taskgraph/transforms/upload_symbols.py  36
-rw-r--r--  taskcluster/taskgraph/try_option_syntax.py  559
-rw-r--r--  taskcluster/taskgraph/util/__init__.py  0
-rw-r--r--  taskcluster/taskgraph/util/attributes.py  26
-rw-r--r--  taskcluster/taskgraph/util/docker.py  160
-rw-r--r--  taskcluster/taskgraph/util/python_path.py  27
-rw-r--r--  taskcluster/taskgraph/util/seta.py  85
-rw-r--r--  taskcluster/taskgraph/util/templates.py  155
-rw-r--r--  taskcluster/taskgraph/util/time.py  114
-rw-r--r--  taskcluster/taskgraph/util/treeherder.py  24
-rw-r--r--  taskcluster/taskgraph/util/yaml.py  16
76 files changed, 8556 insertions(+), 0 deletions(-)
diff --git a/taskcluster/taskgraph/__init__.py b/taskcluster/taskgraph/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/__init__.py
diff --git a/taskcluster/taskgraph/action.py b/taskcluster/taskgraph/action.py
new file mode 100644
index 000000000..608fe3370
--- /dev/null
+++ b/taskcluster/taskgraph/action.py
@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import logging
+import requests
+import yaml
+
+from .create import create_tasks
+from .decision import write_artifact
+from .optimize import optimize_task_graph
+from .taskgraph import TaskGraph
+
+logger = logging.getLogger(__name__)
+TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task/"
+
+
+def taskgraph_action(options):
+ """
+ Run the action task. This function implements `mach taskgraph action-task`,
+ and is responsible for
+
+    * creating a task graph of the tasks requested in the parameters, with
+      respect to a given gecko decision task, and scheduling those jobs.
+ """
+
+ decision_task_id = options['decision_id']
+ # read in the full graph for reference
+ full_task_json = get_artifact(decision_task_id, "public/full-task-graph.json")
+ decision_params = get_artifact(decision_task_id, "public/parameters.yml")
+ all_tasks, full_task_graph = TaskGraph.from_json(full_task_json)
+
+ target_tasks = set(options['task_labels'].split(','))
+ target_graph = full_task_graph.graph.transitive_closure(target_tasks)
+ target_task_graph = TaskGraph(
+ {l: all_tasks[l] for l in target_graph.nodes},
+ target_graph)
+
+ existing_tasks = get_artifact(decision_task_id, "public/label-to-taskid.json")
+
+    # We don't want to optimize the target tasks, since they were explicitly
+    # requested by the user; hence we put `target_tasks` under `do_not_optimize`.
+ optimized_graph, label_to_taskid = optimize_task_graph(target_task_graph=target_task_graph,
+ params=decision_params,
+ do_not_optimize=target_tasks,
+ existing_tasks=existing_tasks)
+
+ # write out the optimized task graph to describe what will actually happen,
+ # and the map of labels to taskids
+ write_artifact('task-graph.json', optimized_graph.to_json())
+ write_artifact('label-to-taskid.json', label_to_taskid)
+ # actually create the graph
+ create_tasks(optimized_graph, label_to_taskid, decision_params)
+
+
+def get_artifact(task_id, path):
+ url = TASKCLUSTER_QUEUE_URL + task_id + "/artifacts/" + path
+ resp = requests.get(url=url)
+ if path.endswith('.json'):
+ artifact = json.loads(resp.text)
+ elif path.endswith('.yml'):
+        artifact = yaml.safe_load(resp.text)
+ return artifact
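
For reference, a minimal sketch of the options dict that `taskgraph_action` consumes, matching the `--decision-id` and `--task-label` flags passed in action.yml below. The taskId and task labels here are hypothetical placeholders:

    from taskgraph.action import taskgraph_action

    options = {
        # taskId of an existing decision task (hypothetical value)
        'decision_id': 'abcdef0123456789abcdef',
        # comma-separated task labels; split on ',' by taskgraph_action
        'task_labels': 'build-linux64/opt,desktop-test-linux64/opt-mochitest-1',
    }
    # Fetches full-task-graph.json and parameters.yml from the decision task,
    # computes the transitive closure of the requested labels, and submits it.
    taskgraph_action(options)
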
diff --git a/taskcluster/taskgraph/action.yml b/taskcluster/taskgraph/action.yml
new file mode 100644
index 000000000..c816f4d5c
--- /dev/null
+++ b/taskcluster/taskgraph/action.yml
@@ -0,0 +1,74 @@
+---
+created: '{{now}}'
+deadline: '{{#from_now}}1 day{{/from_now}}'
+expires: '{{#from_now}}14 days{{/from_now}}'
+metadata:
+ owner: mozilla-taskcluster-maintenance@mozilla.com
+ source: 'https://hg.mozilla.org/{{project}}/file/{{head_rev}}/taskcluster/taskgraph/action.yml'
+ name: "[tc] Action Task"
+ description: Helps schedule new jobs without new push
+
+workerType: "gecko-decision"
+provisionerId: "aws-provisioner-v1"
+schedulerId: "gecko-level-{{level}}"
+
+tags:
+ createdForUser: {{owner}}
+
+scopes:
+ # Bug 1269443: cache scopes, etc. must be listed explicitly
+ - "docker-worker:cache:level-1-*"
+ - "docker-worker:cache:tooltool-cache"
+ - "secrets:get:project/taskcluster/gecko/hgfingerprint"
+ - "assume:repo:hg.mozilla.org/try:*"
+
+routes:
+ - "tc-treeherder.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
+ - "tc-treeherder-stage.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
+
+payload:
+ env:
+ GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
+ GECKO_HEAD_REPOSITORY: '{{{head_repository}}}'
+ GECKO_HEAD_REF: '{{head_ref}}'
+ GECKO_HEAD_REV: '{{head_rev}}'
+ HG_STORE_PATH: /home/worker/checkouts/hg-store
+
+ cache:
+ level-{{level}}-checkouts: /home/worker/checkouts
+
+ features:
+ taskclusterProxy: true
+
+  # Note: This task is built server side, without the context or tooling that
+  # exist in-tree, so we must hard-code the version.
+ image: 'taskcluster/decision:0.1.7'
+
+  # Virtually no network or other potentially risky operations happen as part
+  # of the task, aside from the initial clone. We intentionally have set this
+  # to a lower value, since _all_ decision tasks should use a root
+  # repository which is cached.
+ maxRunTime: 1800
+
+ command:
+ - /home/worker/bin/run-task
+ - '--vcs-checkout=/home/worker/checkouts/gecko'
+ - '--'
+ - bash
+ - -cx
+ - >
+ cd /home/worker/checkouts/gecko &&
+ ln -s /home/worker/artifacts artifacts &&
+ ./mach --log-no-times taskgraph action-task
+ --decision-id='{{decision_task_id}}'
+ --task-label='{{task_labels}}'
+
+ artifacts:
+ 'public':
+ type: 'directory'
+ path: '/home/worker/artifacts'
+ expires: '{{#from_now}}7 days{{/from_now}}'
+
+extra:
+ treeherder:
+ symbol: A
diff --git a/taskcluster/taskgraph/create.py b/taskcluster/taskgraph/create.py
new file mode 100644
index 000000000..f577f8873
--- /dev/null
+++ b/taskcluster/taskgraph/create.py
@@ -0,0 +1,122 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import concurrent.futures as futures
+import requests
+import requests.adapters
+import json
+import os
+import logging
+
+from slugid import nice as slugid
+from taskgraph.util.time import (
+ current_json_time,
+ json_time_from_now
+)
+
+logger = logging.getLogger(__name__)
+
+# the maximum number of parallel createTask calls to make
+CONCURRENCY = 50
+
+
+def create_tasks(taskgraph, label_to_taskid, params):
+ taskid_to_label = {t: l for l, t in label_to_taskid.iteritems()}
+
+ session = requests.Session()
+
+ # Default HTTPAdapter uses 10 connections. Mount custom adapter to increase
+ # that limit. Connections are established as needed, so using a large value
+ # should not negatively impact performance.
+ http_adapter = requests.adapters.HTTPAdapter(pool_connections=CONCURRENCY,
+ pool_maxsize=CONCURRENCY)
+ session.mount('https://', http_adapter)
+ session.mount('http://', http_adapter)
+
+ decision_task_id = os.environ.get('TASK_ID')
+
+ # when running as an actual decision task, we use the decision task's
+ # taskId as the taskGroupId. The process that created the decision task
+ # helpfully placed it in this same taskGroup. If there is no $TASK_ID,
+ # fall back to a slugid
+ task_group_id = decision_task_id or slugid()
+ scheduler_id = 'gecko-level-{}'.format(params['level'])
+
+ with futures.ThreadPoolExecutor(CONCURRENCY) as e:
+ fs = {}
+
+ # We can't submit a task until its dependencies have been submitted.
+ # So our strategy is to walk the graph and submit tasks once all
+ # their dependencies have been submitted.
+ #
+ # Using visit_postorder() here isn't the most efficient: we'll
+ # block waiting for dependencies of task N to submit even though
+ # dependencies for task N+1 may be finished. If we need to optimize
+ # this further, we can build a graph of task dependencies and walk
+ # that.
+ for task_id in taskgraph.graph.visit_postorder():
+ task_def = taskgraph.tasks[task_id].task
+ attributes = taskgraph.tasks[task_id].attributes
+ # if this task has no dependencies, make it depend on this decision
+ # task so that it does not start immediately; and so that if this loop
+ # fails halfway through, none of the already-created tasks run.
+ if decision_task_id and not task_def.get('dependencies'):
+ task_def['dependencies'] = [decision_task_id]
+
+ task_def['taskGroupId'] = task_group_id
+ task_def['schedulerId'] = scheduler_id
+
+ # Wait for dependencies before submitting this.
+ deps_fs = [fs[dep] for dep in task_def.get('dependencies', [])
+ if dep in fs]
+ for f in futures.as_completed(deps_fs):
+ f.result()
+
+ fs[task_id] = e.submit(_create_task, session, task_id,
+ taskid_to_label[task_id], task_def)
+
+ # Schedule tasks as many times as task_duplicates indicates
+ for i in range(1, attributes.get('task_duplicates', 1)):
+ # We use slugid() since we want a distinct task id
+ fs[task_id] = e.submit(_create_task, session, slugid(),
+ taskid_to_label[task_id], task_def)
+
+ # Wait for all futures to complete.
+ for f in futures.as_completed(fs.values()):
+ f.result()
+
+
+def _create_task(session, task_id, label, task_def):
+ # create the task using 'http://taskcluster/queue', which is proxied to the queue service
+ # with credentials appropriate to this job.
+
+ # Resolve timestamps
+ now = current_json_time(datetime_format=True)
+ task_def = resolve_timestamps(now, task_def)
+
+ logger.debug("Creating task with taskId {} for {}".format(task_id, label))
+ res = session.put('http://taskcluster/queue/v1/task/{}'.format(task_id),
+ data=json.dumps(task_def))
+ if res.status_code != 200:
+ try:
+ logger.error(res.json()['message'])
+        except Exception:
+ logger.error(res.text)
+ res.raise_for_status()
+
+
+def resolve_timestamps(now, task_def):
+ def recurse(val):
+ if isinstance(val, list):
+ return [recurse(v) for v in val]
+ elif isinstance(val, dict):
+ if val.keys() == ['relative-datestamp']:
+ return json_time_from_now(val['relative-datestamp'], now)
+ else:
+ return {k: recurse(v) for k, v in val.iteritems()}
+ else:
+ return val
+ return recurse(task_def)
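
A worked example of the `resolve_timestamps` helper above: any single-key `{'relative-datestamp': ...}` dict is replaced with an absolute timestamp computed from `now`, while all other values are recursed into unchanged. A sketch, with a made-up task definition:

    from taskgraph.create import resolve_timestamps
    from taskgraph.util.time import current_json_time

    now = current_json_time(datetime_format=True)
    task_def = {
        'created': {'relative-datestamp': '0 seconds'},
        'deadline': {'relative-datestamp': '1 day'},
        'payload': {'maxRunTime': 1800},   # untouched: not a datestamp dict
    }
    resolved = resolve_timestamps(now, task_def)
    # resolved['created'] and resolved['deadline'] are now absolute JSON
    # timestamps; resolved['payload'] is returned as-is.
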
diff --git a/taskcluster/taskgraph/decision.py b/taskcluster/taskgraph/decision.py
new file mode 100644
index 000000000..d6460390f
--- /dev/null
+++ b/taskcluster/taskgraph/decision.py
@@ -0,0 +1,181 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import json
+import logging
+
+import time
+import yaml
+
+from .generator import TaskGraphGenerator
+from .create import create_tasks
+from .parameters import Parameters
+from .target_tasks import get_method
+from .taskgraph import TaskGraph
+
+from taskgraph.util.templates import Templates
+from taskgraph.util.time import (
+ json_time_from_now,
+ current_json_time,
+)
+
+logger = logging.getLogger(__name__)
+
+ARTIFACTS_DIR = 'artifacts'
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
+
+# For each project, this gives a set of parameters specific to the project.
+# See `taskcluster/docs/parameters.rst` for information on parameters.
+PER_PROJECT_PARAMETERS = {
+ 'try': {
+ 'target_tasks_method': 'try_option_syntax',
+ # Always perform optimization. This makes it difficult to use try
+ # pushes to run a task that would otherwise be optimized, but is a
+ # compromise to avoid essentially disabling optimization in try.
+ 'optimize_target_tasks': True,
+ },
+
+ 'ash': {
+ 'target_tasks_method': 'ash_tasks',
+ 'optimize_target_tasks': True,
+ },
+
+ 'cedar': {
+ 'target_tasks_method': 'cedar_tasks',
+ 'optimize_target_tasks': True,
+ },
+
+ # the default parameters are used for projects that do not match above.
+ 'default': {
+ 'target_tasks_method': 'default',
+ 'optimize_target_tasks': True,
+ }
+}
+
+
+def taskgraph_decision(options):
+ """
+ Run the decision task. This function implements `mach taskgraph decision`,
+ and is responsible for
+
+ * processing decision task command-line options into parameters
+ * running task-graph generation exactly the same way the other `mach
+ taskgraph` commands do
+ * generating a set of artifacts to memorialize the graph
+ * calling TaskCluster APIs to create the graph
+ """
+
+ parameters = get_decision_parameters(options)
+
+ # create a TaskGraphGenerator instance
+ target_tasks_method = parameters.get('target_tasks_method', 'all_tasks')
+ target_tasks_method = get_method(target_tasks_method)
+ tgg = TaskGraphGenerator(
+ root_dir=options['root'],
+ parameters=parameters,
+ target_tasks_method=target_tasks_method)
+
+ # write out the parameters used to generate this graph
+ write_artifact('parameters.yml', dict(**parameters))
+
+ # write out the yml file for action tasks
+ write_artifact('action.yml', get_action_yml(parameters))
+
+ # write out the full graph for reference
+ full_task_json = tgg.full_task_graph.to_json()
+ write_artifact('full-task-graph.json', full_task_json)
+
+ # this is just a test to check whether the from_json() function is working
+ _, _ = TaskGraph.from_json(full_task_json)
+
+ # write out the target task set to allow reproducing this as input
+ write_artifact('target-tasks.json', tgg.target_task_set.tasks.keys())
+
+ # write out the optimized task graph to describe what will actually happen,
+ # and the map of labels to taskids
+ write_artifact('task-graph.json', tgg.optimized_task_graph.to_json())
+ write_artifact('label-to-taskid.json', tgg.label_to_taskid)
+
+ # actually create the graph
+ create_tasks(tgg.optimized_task_graph, tgg.label_to_taskid, parameters)
+
+
+def get_decision_parameters(options):
+ """
+ Load parameters from the command-line options for 'taskgraph decision'.
+ This also applies per-project parameters, based on the given project.
+
+ """
+ parameters = {n: options[n] for n in [
+ 'base_repository',
+ 'head_repository',
+ 'head_rev',
+ 'head_ref',
+ 'message',
+ 'project',
+ 'pushlog_id',
+ 'pushdate',
+ 'owner',
+ 'level',
+ 'triggered_by',
+ 'target_tasks_method',
+ ] if n in options}
+
+ # owner must be an email, but sometimes (e.g., for ffxbld) it is not, in which
+ # case, fake it
+ if '@' not in parameters['owner']:
+ parameters['owner'] += '@noreply.mozilla.org'
+
+ # use the pushdate as build_date if given, else use current time
+ parameters['build_date'] = parameters['pushdate'] or int(time.time())
+ # moz_build_date is the build identifier based on build_date
+ parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
+ time.gmtime(parameters['build_date']))
+
+ project = parameters['project']
+ try:
+ parameters.update(PER_PROJECT_PARAMETERS[project])
+ except KeyError:
+ logger.warning("using default project parameters; add {} to "
+ "PER_PROJECT_PARAMETERS in {} to customize behavior "
+ "for this project".format(project, __file__))
+ parameters.update(PER_PROJECT_PARAMETERS['default'])
+
+ # `target_tasks_method` has higher precedence than `project` parameters
+ if options.get('target_tasks_method'):
+ parameters['target_tasks_method'] = options['target_tasks_method']
+
+ return Parameters(parameters)
+
+
+def write_artifact(filename, data):
+ logger.info('writing artifact file `{}`'.format(filename))
+ if not os.path.isdir(ARTIFACTS_DIR):
+ os.mkdir(ARTIFACTS_DIR)
+ path = os.path.join(ARTIFACTS_DIR, filename)
+ if filename.endswith('.yml'):
+ with open(path, 'w') as f:
+ yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False)
+ elif filename.endswith('.json'):
+ with open(path, 'w') as f:
+ json.dump(data, f, sort_keys=True, indent=2, separators=(',', ': '))
+ else:
+ raise TypeError("Don't know how to write to {}".format(filename))
+
+
+def get_action_yml(parameters):
+ templates = Templates(os.path.join(GECKO, "taskcluster/taskgraph"))
+ action_parameters = parameters.copy()
+ action_parameters.update({
+ "decision_task_id": "{{decision_task_id}}",
+ "task_labels": "{{task_labels}}",
+ "from_now": json_time_from_now,
+ "now": current_json_time()
+ })
+ return templates.load('action.yml', action_parameters)
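
A sketch of the precedence that `get_decision_parameters` implements: per-project defaults from PER_PROJECT_PARAMETERS are merged in, a non-email owner is rewritten, and an explicit `target_tasks_method` option wins over the project default. All option values below are hypothetical:

    from taskgraph.decision import get_decision_parameters

    options = {
        'base_repository': 'https://hg.mozilla.org/mozilla-unified',
        'head_repository': 'https://hg.mozilla.org/try',
        'head_rev': 'abcdef012345',
        'head_ref': 'abcdef012345',
        'message': 'try: -b o -p linux64 -u none -t none',
        'project': 'try',
        'pushlog_id': '1',
        'pushdate': 0,                 # falsy, so build_date falls back to now
        'owner': 'ffxbld',             # no '@', so it is rewritten below
        'level': '1',
        'triggered_by': 'push',
        'target_tasks_method': 'try_option_syntax',
    }
    params = get_decision_parameters(options)
    assert params['owner'] == 'ffxbld@noreply.mozilla.org'
    assert params['target_tasks_method'] == 'try_option_syntax'
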
diff --git a/taskcluster/taskgraph/docker.py b/taskcluster/taskgraph/docker.py
new file mode 100644
index 000000000..8159fd5a4
--- /dev/null
+++ b/taskcluster/taskgraph/docker.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import os
+import subprocess
+import tarfile
+import tempfile
+import urllib2
+import which
+
+from taskgraph.util import docker
+
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
+IMAGE_DIR = os.path.join(GECKO, 'testing', 'docker')
+INDEX_URL = 'https://index.taskcluster.net/v1/task/' + docker.INDEX_PREFIX + '.{}.{}.hash.{}'
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+
+
+def load_image_by_name(image_name):
+ context_path = os.path.join(GECKO, 'testing', 'docker', image_name)
+ context_hash = docker.generate_context_hash(GECKO, context_path, image_name)
+
+ image_index_url = INDEX_URL.format('mozilla-central', image_name, context_hash)
+ print("Fetching", image_index_url)
+ task = json.load(urllib2.urlopen(image_index_url))
+
+ return load_image_by_task_id(task['taskId'])
+
+
+def load_image_by_task_id(task_id):
+ # because we need to read this file twice (and one read is not all the way
+ # through), it is difficult to stream it. So we download to disk and then
+ # read it back.
+ filename = 'temp-docker-image.tar'
+
+ artifact_url = ARTIFACT_URL.format(task_id, 'public/image.tar.zst')
+ print("Downloading", artifact_url)
+ tempfilename = 'temp-docker-image.tar.zst'
+ subprocess.check_call(['curl', '-#', '-L', '-o', tempfilename, artifact_url])
+ print("Decompressing")
+ subprocess.check_call(['zstd', '-d', tempfilename, '-o', filename])
+ print("Deleting temporary file")
+ os.unlink(tempfilename)
+
+ print("Determining image name")
+ tf = tarfile.open(filename)
+ repositories = json.load(tf.extractfile('repositories'))
+ name = repositories.keys()[0]
+ tag = repositories[name].keys()[0]
+ name = '{}:{}'.format(name, tag)
+ print("Image name:", name)
+
+ print("Loading image into docker")
+ try:
+ subprocess.check_call(['docker', 'load', '-i', filename])
+ except subprocess.CalledProcessError:
+ print("*** `docker load` failed. You may avoid re-downloading that tarball by fixing the")
+ print("*** problem and running `docker load < {}`.".format(filename))
+ raise
+
+ print("Deleting temporary file")
+ os.unlink(filename)
+
+ print("The requested docker image is now available as", name)
+ print("Try: docker run -ti --rm {} bash".format(name))
+
+
+def build_context(name, outputFile):
+ """Build a context.tar for image with specified name.
+ """
+ if not name:
+ raise ValueError('must provide a Docker image name')
+ if not outputFile:
+ raise ValueError('must provide a outputFile')
+
+ image_dir = os.path.join(IMAGE_DIR, name)
+ if not os.path.isdir(image_dir):
+ raise Exception('image directory does not exist: %s' % image_dir)
+
+ docker.create_context_tar(GECKO, image_dir, outputFile, "")
+
+
+def build_image(name):
+ """Build a Docker image of specified name.
+
+ Output from image building process will be printed to stdout.
+ """
+ if not name:
+ raise ValueError('must provide a Docker image name')
+
+ image_dir = os.path.join(IMAGE_DIR, name)
+ if not os.path.isdir(image_dir):
+ raise Exception('image directory does not exist: %s' % image_dir)
+
+ tag = docker.docker_image(name, default_version='latest')
+
+ docker_bin = which.which('docker')
+
+ # Verify that Docker is working.
+ try:
+ subprocess.check_output([docker_bin, '--version'])
+ except subprocess.CalledProcessError:
+ raise Exception('Docker server is unresponsive. Run `docker ps` and '
+ 'check that Docker is running')
+
+ # We obtain a context archive and build from that. Going through the
+ # archive creation is important: it normalizes things like file owners
+ # and mtimes to increase the chances that image generation is
+ # deterministic.
+ fd, context_path = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ docker.create_context_tar(GECKO, image_dir, context_path, name)
+ docker.build_from_context(docker_bin, context_path, name, tag)
+ finally:
+ os.unlink(context_path)
+
+ print('Successfully built %s and tagged with %s' % (name, tag))
+
+ if tag.endswith(':latest'):
+ print('*' * 50)
+ print('WARNING: no VERSION file found in image directory.')
+ print('Image is not suitable for deploying/pushing.')
+ print('Create an image suitable for deploying/pushing by creating')
+ print('a VERSION file in the image directory.')
+ print('*' * 50)
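
A hedged usage sketch of the loader above: it shells out to `curl`, `zstd` and `docker`, so all three must be on PATH. The image name is a hypothetical example and must match a directory under testing/docker with a published index entry:

    from taskgraph.docker import load_image_by_name

    # Downloads the indexed image.tar.zst, decompresses it, and docker-loads it.
    load_image_by_name('desktop-build')
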
diff --git a/taskcluster/taskgraph/files_changed.py b/taskcluster/taskgraph/files_changed.py
new file mode 100644
index 000000000..973a62bb8
--- /dev/null
+++ b/taskcluster/taskgraph/files_changed.py
@@ -0,0 +1,65 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+Support for optimizing tasks based on the set of files that have changed.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import requests
+from redo import retry
+from mozpack.path import match as mozpackmatch
+
+logger = logging.getLogger(__name__)
+_cache = {}
+
+
+def get_changed_files(repository, revision):
+ """
+ Get the set of files changed in the push headed by the given revision.
+ Responses are cached, so multiple calls with the same arguments are OK.
+ """
+ key = repository, revision
+ if key not in _cache:
+ url = '%s/json-automationrelevance/%s' % (repository.rstrip('/'), revision)
+ logger.debug("Querying version control for metadata: %s", url)
+
+ def get_automationrelevance():
+ response = requests.get(url, timeout=5)
+ return response.json()
+ contents = retry(get_automationrelevance, attempts=2, sleeptime=10)
+
+ logger.debug('{} commits influencing task scheduling:'
+ .format(len(contents['changesets'])))
+ changed_files = set()
+ for c in contents['changesets']:
+ logger.debug(" {cset} {desc}".format(
+ cset=c['node'][0:12],
+ desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
+ changed_files |= set(c['files'])
+
+ _cache[key] = changed_files
+ return _cache[key]
+
+
+def check(params, file_patterns):
+ """Determine whether any of the files changed in the indicated push to
+ https://hg.mozilla.org match any of the given file patterns."""
+ repository = params.get('head_repository')
+ revision = params.get('head_rev')
+ if not repository or not revision:
+ logger.warning("Missing `head_repository` or `head_rev` parameters; "
+ "assuming all files have changed")
+ return True
+
+ changed_files = get_changed_files(repository, revision)
+
+ for pattern in file_patterns:
+ for path in changed_files:
+ if mozpackmatch(path, pattern):
+ return True
+
+ return False
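
A sketch of how `check()` might be used to gate a task on the files touched by a push; the repository, revision, and patterns below are placeholder values:

    from taskgraph import files_changed

    params = {
        'head_repository': 'https://hg.mozilla.org/mozilla-central',
        'head_rev': 'abcdef012345',
    }
    # True if any changed file matches either mozpack-style pattern.
    if files_changed.check(params, ['taskcluster/**', 'testing/docker/**']):
        print("task is relevant to this push")
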
diff --git a/taskcluster/taskgraph/generator.py b/taskcluster/taskgraph/generator.py
new file mode 100644
index 000000000..809ed1f5c
--- /dev/null
+++ b/taskcluster/taskgraph/generator.py
@@ -0,0 +1,218 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+import logging
+import os
+import yaml
+
+from .graph import Graph
+from .taskgraph import TaskGraph
+from .optimize import optimize_task_graph
+from .util.python_path import find_object
+
+logger = logging.getLogger(__name__)
+
+
+class Kind(object):
+
+ def __init__(self, name, path, config):
+ self.name = name
+ self.path = path
+ self.config = config
+
+ def _get_impl_class(self):
+ # load the class defined by implementation
+ try:
+ impl = self.config['implementation']
+ except KeyError:
+ raise KeyError("{!r} does not define implementation".format(self.path))
+ return find_object(impl)
+
+ def load_tasks(self, parameters, loaded_tasks):
+ impl_class = self._get_impl_class()
+ return impl_class.load_tasks(self.name, self.path, self.config,
+ parameters, loaded_tasks)
+
+
+class TaskGraphGenerator(object):
+ """
+ The central controller for taskgraph. This handles all phases of graph
+    generation. The task graph is generated from all of the kinds defined in
+ subdirectories of the generator's root directory.
+
+ Access to the results of this generation, as well as intermediate values at
+ various phases of generation, is available via properties. This encourages
+ the provision of all generation inputs at instance construction time.
+ """
+
+ # Task-graph generation is implemented as a Python generator that yields
+ # each "phase" of generation. This allows some mach subcommands to short-
+ # circuit generation of the entire graph by never completing the generator.
+
+ def __init__(self, root_dir, parameters,
+ target_tasks_method):
+ """
+ @param root_dir: root directory, with subdirectories for each kind
+ @param parameters: parameters for this task-graph generation
+ @type parameters: dict
+ @param target_tasks_method: function to determine the target_task_set;
+ see `./target_tasks.py`.
+ @type target_tasks_method: function
+ """
+
+ self.root_dir = root_dir
+ self.parameters = parameters
+ self.target_tasks_method = target_tasks_method
+
+ # this can be set up until the time the target task set is generated;
+ # it defaults to parameters['target_tasks']
+ self._target_tasks = parameters.get('target_tasks')
+
+ # start the generator
+ self._run = self._run()
+ self._run_results = {}
+
+ @property
+ def full_task_set(self):
+ """
+ The full task set: all tasks defined by any kind (a graph without edges)
+
+ @type: TaskGraph
+ """
+ return self._run_until('full_task_set')
+
+ @property
+ def full_task_graph(self):
+ """
+ The full task graph: the full task set, with edges representing
+ dependencies.
+
+ @type: TaskGraph
+ """
+ return self._run_until('full_task_graph')
+
+ @property
+ def target_task_set(self):
+ """
+        The set of targeted tasks (a graph without edges)
+
+ @type: TaskGraph
+ """
+ return self._run_until('target_task_set')
+
+ @property
+ def target_task_graph(self):
+ """
+        The set of targeted tasks and all of their dependencies
+
+ @type: TaskGraph
+ """
+ return self._run_until('target_task_graph')
+
+ @property
+ def optimized_task_graph(self):
+ """
+        The set of targeted tasks and all of their dependencies; tasks that
+ have been optimized out are either omitted or replaced with a Task
+ instance containing only a task_id.
+
+ @type: TaskGraph
+ """
+ return self._run_until('optimized_task_graph')
+
+ @property
+ def label_to_taskid(self):
+ """
+ A dictionary mapping task label to assigned taskId. This property helps
+ in interpreting `optimized_task_graph`.
+
+ @type: dictionary
+ """
+ return self._run_until('label_to_taskid')
+
+ def _load_kinds(self):
+ for path in os.listdir(self.root_dir):
+ path = os.path.join(self.root_dir, path)
+ if not os.path.isdir(path):
+ continue
+ kind_name = os.path.basename(path)
+
+ kind_yml = os.path.join(path, 'kind.yml')
+ if not os.path.exists(kind_yml):
+ continue
+
+ logger.debug("loading kind `{}` from `{}`".format(kind_name, path))
+ with open(kind_yml) as f:
+                config = yaml.safe_load(f)
+
+ yield Kind(kind_name, path, config)
+
+ def _run(self):
+ logger.info("Loading kinds")
+ # put the kinds into a graph and sort topologically so that kinds are loaded
+ # in post-order
+ kinds = {kind.name: kind for kind in self._load_kinds()}
+ edges = set()
+ for kind in kinds.itervalues():
+ for dep in kind.config.get('kind-dependencies', []):
+ edges.add((kind.name, dep, 'kind-dependency'))
+ kind_graph = Graph(set(kinds), edges)
+
+ logger.info("Generating full task set")
+ all_tasks = {}
+ for kind_name in kind_graph.visit_postorder():
+ logger.debug("Loading tasks for kind {}".format(kind_name))
+ kind = kinds[kind_name]
+ new_tasks = kind.load_tasks(self.parameters, list(all_tasks.values()))
+ for task in new_tasks:
+ if task.label in all_tasks:
+ raise Exception("duplicate tasks with label " + task.label)
+ all_tasks[task.label] = task
+ logger.info("Generated {} tasks for kind {}".format(len(new_tasks), kind_name))
+ full_task_set = TaskGraph(all_tasks, Graph(set(all_tasks), set()))
+ yield 'full_task_set', full_task_set
+
+ logger.info("Generating full task graph")
+ edges = set()
+ for t in full_task_set:
+ for dep, depname in t.get_dependencies(full_task_set):
+ edges.add((t.label, dep, depname))
+
+ full_task_graph = TaskGraph(all_tasks,
+ Graph(full_task_set.graph.nodes, edges))
+ yield 'full_task_graph', full_task_graph
+
+ logger.info("Generating target task set")
+ target_tasks = set(self.target_tasks_method(full_task_graph, self.parameters))
+ target_task_set = TaskGraph(
+ {l: all_tasks[l] for l in target_tasks},
+ Graph(target_tasks, set()))
+ yield 'target_task_set', target_task_set
+
+ logger.info("Generating target task graph")
+ target_graph = full_task_graph.graph.transitive_closure(target_tasks)
+ target_task_graph = TaskGraph(
+ {l: all_tasks[l] for l in target_graph.nodes},
+ target_graph)
+ yield 'target_task_graph', target_task_graph
+
+ logger.info("Generating optimized task graph")
+ do_not_optimize = set()
+ if not self.parameters.get('optimize_target_tasks', True):
+ do_not_optimize = target_task_set.graph.nodes
+ optimized_task_graph, label_to_taskid = optimize_task_graph(target_task_graph,
+ self.parameters,
+ do_not_optimize)
+ yield 'label_to_taskid', label_to_taskid
+ yield 'optimized_task_graph', optimized_task_graph
+
+ def _run_until(self, name):
+ while name not in self._run_results:
+ try:
+ k, v = self._run.next()
+ except StopIteration:
+ raise AttributeError("No such run result {}".format(name))
+ self._run_results[k] = v
+ return self._run_results[name]
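
The `_run`/`_run_until` machinery above is a lazily-driven generator: each property advances generation only as far as the phase it needs, so a caller that only wants the full task set never pays for optimization. The same pattern in isolation, as a self-contained sketch:

    class LazyPhases(object):
        """Mimics TaskGraphGenerator's _run/_run_until machinery."""
        def __init__(self):
            self._run = self._phases()
            self._results = {}

        def _phases(self):
            # each yield is one "phase" of (hypothetical) generation
            yield 'full_task_set', ['task-a', 'task-b']
            yield 'optimized_task_graph', ['task-b']  # task-a optimized away

        def _run_until(self, name):
            while name not in self._results:
                try:
                    k, v = next(self._run)
                except StopIteration:
                    raise AttributeError("No such run result {}".format(name))
                self._results[k] = v
            return self._results[name]

    phases = LazyPhases()
    print(phases._run_until('full_task_set'))   # runs only the first phase
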
diff --git a/taskcluster/taskgraph/graph.py b/taskcluster/taskgraph/graph.py
new file mode 100644
index 000000000..731341c51
--- /dev/null
+++ b/taskcluster/taskgraph/graph.py
@@ -0,0 +1,117 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import collections
+
+
+class Graph(object):
+ """
+ Generic representation of a directed acyclic graph with labeled edges
+ connecting the nodes. Graph operations are implemented in a functional
+ manner, so the data structure is immutable.
+
+    It permits at most one edge of a given name between any pair of nodes. The
+ graph is not checked for cycles, and methods may hang or otherwise fail if
+ given a cyclic graph.
+
+ The `nodes` and `edges` attributes may be accessed in a read-only fashion.
+ The `nodes` attribute is a set of node names, while `edges` is a set of
+ `(left, right, name)` tuples representing an edge named `name` going from
+    node `left` to node `right`.
+ """
+
+ def __init__(self, nodes, edges):
+ """
+ Create a graph. Nodes and edges are both as described in the class
+ documentation. Both values are used by reference, and should not be
+ modified after building a graph.
+ """
+ assert isinstance(nodes, set)
+ assert isinstance(edges, set)
+ self.nodes = nodes
+ self.edges = edges
+
+ def __eq__(self, other):
+ return self.nodes == other.nodes and self.edges == other.edges
+
+ def __repr__(self):
+ return "<Graph nodes={!r} edges={!r}>".format(self.nodes, self.edges)
+
+ def transitive_closure(self, nodes):
+ """
+ Return the transitive closure of <nodes>: the graph containing all
+ specified nodes as well as any nodes reachable from them, and any
+ intervening edges.
+ """
+ assert isinstance(nodes, set)
+ assert nodes <= self.nodes
+
+ # generate a new graph by expanding along edges until reaching a fixed
+ # point
+ new_nodes, new_edges = nodes, set()
+ nodes, edges = set(), set()
+ while (new_nodes, new_edges) != (nodes, edges):
+ nodes, edges = new_nodes, new_edges
+ add_edges = set((left, right, name)
+ for (left, right, name) in self.edges
+ if left in nodes)
+ add_nodes = set(right for (_, right, _) in add_edges)
+ new_nodes = nodes | add_nodes
+ new_edges = edges | add_edges
+ return Graph(new_nodes, new_edges)
+
+ def visit_postorder(self):
+ """
+ Generate a sequence of nodes in postorder, such that every node is
+ visited *after* any nodes it links to.
+
+ Behavior is undefined (read: it will hang) if the graph contains a
+ cycle.
+ """
+ queue = collections.deque(sorted(self.nodes))
+ links_by_node = self.links_dict()
+ seen = set()
+ while queue:
+ node = queue.popleft()
+ if node in seen:
+ continue
+ links = links_by_node[node]
+ if all((n in seen) for n in links):
+ seen.add(node)
+ yield node
+ else:
+ queue.extend(n for n in links if n not in seen)
+ queue.append(node)
+
+ def links_dict(self):
+ """
+ Return a dictionary mapping each node to a set of the nodes it links to
+ (omitting edge names)
+ """
+ links = collections.defaultdict(set)
+ for left, right, _ in self.edges:
+ links[left].add(right)
+ return links
+
+ def named_links_dict(self):
+ """
+ Return a two-level dictionary mapping each node to a dictionary mapping
+ edge names to labels.
+ """
+ links = collections.defaultdict(dict)
+ for left, right, name in self.edges:
+ links[left][name] = right
+ return links
+
+ def reverse_links_dict(self):
+ """
+ Return a dictionary mapping each node to a set of the nodes linking to
+ it (omitting edge names)
+ """
+ links = collections.defaultdict(set)
+ for left, right, _ in self.edges:
+ links[right].add(left)
+ return links
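
A small worked example of the Graph API above, with a single 'dep' edge from a test task to a build task:

    from taskgraph.graph import Graph

    g = Graph({'build', 'test', 'docs'},
              {('test', 'build', 'dep')})      # edge from test to build

    closure = g.transitive_closure({'test'})
    assert closure.nodes == {'build', 'test'}  # 'docs' is unreachable

    # Postorder: 'build' is yielded before 'test', which links to it.
    order = list(g.visit_postorder())
    assert order.index('build') < order.index('test')
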
diff --git a/taskcluster/taskgraph/optimize.py b/taskcluster/taskgraph/optimize.py
new file mode 100644
index 000000000..120e6807b
--- /dev/null
+++ b/taskcluster/taskgraph/optimize.py
@@ -0,0 +1,156 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+import logging
+import re
+
+from .graph import Graph
+from .taskgraph import TaskGraph
+from slugid import nice as slugid
+
+logger = logging.getLogger(__name__)
+TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>')
+
+
+def optimize_task_graph(target_task_graph, params, do_not_optimize, existing_tasks=None):
+ """
+ Perform task optimization, without optimizing tasks named in
+ do_not_optimize.
+ """
+ named_links_dict = target_task_graph.graph.named_links_dict()
+ label_to_taskid = {}
+
+ # This proceeds in two phases. First, mark all optimized tasks (those
+ # which will be removed from the graph) as such, including a replacement
+ # taskId where applicable. Second, generate a new task graph containing
+ # only the non-optimized tasks, with all task labels resolved to taskIds
+ # and with task['dependencies'] populated.
+ annotate_task_graph(target_task_graph=target_task_graph,
+ params=params,
+ do_not_optimize=do_not_optimize,
+ named_links_dict=named_links_dict,
+ label_to_taskid=label_to_taskid,
+ existing_tasks=existing_tasks)
+ return get_subgraph(target_task_graph, named_links_dict, label_to_taskid), label_to_taskid
+
+
+def resolve_task_references(label, task_def, taskid_for_edge_name):
+ def repl(match):
+ key = match.group(1)
+ try:
+ return taskid_for_edge_name[key]
+ except KeyError:
+ # handle escaping '<'
+ if key == '<':
+ return key
+ raise KeyError("task '{}' has no dependency named '{}'".format(label, key))
+
+ def recurse(val):
+ if isinstance(val, list):
+ return [recurse(v) for v in val]
+ elif isinstance(val, dict):
+ if val.keys() == ['task-reference']:
+ return TASK_REFERENCE_PATTERN.sub(repl, val['task-reference'])
+ else:
+ return {k: recurse(v) for k, v in val.iteritems()}
+ else:
+ return val
+ return recurse(task_def)
+
+
+def annotate_task_graph(target_task_graph, params, do_not_optimize,
+ named_links_dict, label_to_taskid, existing_tasks):
+ """
+ Annotate each task in the graph with .optimized (boolean) and .task_id
+ (possibly None), following the rules for optimization and calling the task
+ kinds' `optimize_task` method.
+
+ As a side effect, label_to_taskid is updated with labels for all optimized
+ tasks that are replaced with existing tasks.
+ """
+
+ # set .optimized for all tasks, and .task_id for optimized tasks
+ # with replacements
+ for label in target_task_graph.graph.visit_postorder():
+ task = target_task_graph.tasks[label]
+ named_task_dependencies = named_links_dict.get(label, {})
+
+ # check whether any dependencies have been optimized away
+ dependencies = [target_task_graph.tasks[l] for l in named_task_dependencies.itervalues()]
+ for t in dependencies:
+ if t.optimized and not t.task_id:
+ raise Exception(
+ "task {} was optimized away, but {} depends on it".format(
+ t.label, label))
+
+ # if this task is blacklisted, don't even consider optimizing
+ replacement_task_id = None
+ if label in do_not_optimize:
+ optimized = False
+ # Let's check whether this task has been created before
+ elif existing_tasks is not None and label in existing_tasks:
+ optimized = True
+ replacement_task_id = existing_tasks[label]
+ # otherwise, examine the task itself (which may be an expensive operation)
+ else:
+ optimized, replacement_task_id = task.optimize(params)
+
+ task.optimized = optimized
+ task.task_id = replacement_task_id
+ if replacement_task_id:
+ label_to_taskid[label] = replacement_task_id
+
+ if optimized:
+ if replacement_task_id:
+ logger.debug("optimizing `{}`, replacing with task `{}`"
+ .format(label, replacement_task_id))
+ else:
+ logger.debug("optimizing `{}` away".format(label))
+ # note: any dependent tasks will fail when they see this
+ else:
+ if replacement_task_id:
+ raise Exception("{}: optimize_task returned False with a taskId".format(label))
+
+
+def get_subgraph(annotated_task_graph, named_links_dict, label_to_taskid):
+ """
+ Return the subgraph of annotated_task_graph consisting only of
+ non-optimized tasks and edges between them.
+
+ To avoid losing track of taskIds for tasks optimized away, this method
+ simultaneously substitutes real taskIds for task labels in the graph, and
+ populates each task definition's `dependencies` key with the appropriate
+ taskIds. Task references are resolved in the process.
+ """
+
+ # resolve labels to taskIds and populate task['dependencies']
+ tasks_by_taskid = {}
+ for label in annotated_task_graph.graph.visit_postorder():
+ task = annotated_task_graph.tasks[label]
+ if task.optimized:
+ continue
+ task.task_id = label_to_taskid[label] = slugid()
+ named_task_dependencies = {
+ name: label_to_taskid[label]
+ for name, label in named_links_dict.get(label, {}).iteritems()}
+ task.task = resolve_task_references(task.label, task.task, named_task_dependencies)
+ task.task.setdefault('dependencies', []).extend(named_task_dependencies.itervalues())
+ tasks_by_taskid[task.task_id] = task
+
+ # resolve edges to taskIds
+ edges_by_taskid = (
+ (label_to_taskid.get(left), label_to_taskid.get(right), name)
+ for (left, right, name) in annotated_task_graph.graph.edges
+ )
+ # ..and drop edges that are no longer in the task graph
+ edges_by_taskid = set(
+ (left, right, name)
+ for (left, right, name) in edges_by_taskid
+ if left in tasks_by_taskid and right in tasks_by_taskid
+ )
+
+ return TaskGraph(
+ tasks_by_taskid,
+ Graph(set(tasks_by_taskid), edges_by_taskid))
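
A worked example of `resolve_task_references`, showing a `task-reference` value with a `<build>` placeholder being resolved against a made-up taskId:

    from taskgraph.optimize import resolve_task_references

    task_def = {
        'payload': {
            'env': {'BUILD_URL': {'task-reference': 'queue/task/<build>/artifacts'}},
        },
    }
    resolved = resolve_task_references('test-task', task_def,
                                       {'build': 'bBuIlDtAsKiD'})
    assert (resolved['payload']['env']['BUILD_URL'] ==
            'queue/task/bBuIlDtAsKiD/artifacts')
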
diff --git a/taskcluster/taskgraph/parameters.py b/taskcluster/taskgraph/parameters.py
new file mode 100644
index 000000000..51b9e77c7
--- /dev/null
+++ b/taskcluster/taskgraph/parameters.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import yaml
+from mozbuild.util import ReadOnlyDict
+
+# Please keep this list sorted and in sync with taskcluster/docs/parameters.rst
+PARAMETER_NAMES = set([
+ 'base_repository',
+ 'build_date',
+ 'head_ref',
+ 'head_repository',
+ 'head_rev',
+ 'level',
+ 'message',
+ 'moz_build_date',
+ 'optimize_target_tasks',
+ 'owner',
+ 'project',
+ 'pushdate',
+ 'pushlog_id',
+ 'target_tasks_method',
+ 'triggered_by',
+])
+
+
+class Parameters(ReadOnlyDict):
+ """An immutable dictionary with nicer KeyError messages on failure"""
+ def check(self):
+ names = set(self)
+ msg = []
+
+ missing = PARAMETER_NAMES - names
+ if missing:
+ msg.append("missing parameters: " + ", ".join(missing))
+
+ extra = names - PARAMETER_NAMES
+ if extra:
+ msg.append("extra parameters: " + ", ".join(extra))
+
+ if msg:
+ raise Exception("; ".join(msg))
+
+ def __getitem__(self, k):
+ if k not in PARAMETER_NAMES:
+ raise KeyError("no such parameter {!r}".format(k))
+ try:
+ return super(Parameters, self).__getitem__(k)
+ except KeyError:
+ raise KeyError("taskgraph parameter {!r} not found".format(k))
+
+
+def load_parameters_file(options):
+ """
+ Load parameters from the --parameters option
+ """
+ filename = options['parameters']
+ if not filename:
+ return Parameters()
+ with open(filename) as f:
+ if filename.endswith('.yml'):
+ return Parameters(**yaml.safe_load(f))
+ elif filename.endswith('.json'):
+ return Parameters(**json.load(f))
+ else:
+ raise TypeError("Parameters file `{}` is not JSON or YAML".format(filename))
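
A short sketch of how Parameters behaves; the messages shown in the comments come from `__getitem__` and `check()` above:

    from taskgraph.parameters import Parameters

    params = Parameters(project='try', level='1')
    print(params['project'])          # 'try'
    try:
        params['head_rev']            # a valid name, but not supplied
    except KeyError as e:
        print(e)                      # taskgraph parameter 'head_rev' not found
    try:
        params.check()                # full validation against PARAMETER_NAMES
    except Exception as e:
        print(e)                      # missing parameters: ...
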
diff --git a/taskcluster/taskgraph/target_tasks.py b/taskcluster/taskgraph/target_tasks.py
new file mode 100644
index 000000000..d2b3f5a7f
--- /dev/null
+++ b/taskcluster/taskgraph/target_tasks.py
@@ -0,0 +1,121 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+from taskgraph import try_option_syntax
+
+INTEGRATION_PROJECTS = set([
+ 'mozilla-inbound',
+ 'autoland',
+])
+
+RELEASE_PROJECTS = set([
+ 'mozilla-central',
+ 'mozilla-aurora',
+ 'mozilla-beta',
+ 'mozilla-release',
+ 'mozilla-esr52',
+])
+
+_target_task_methods = {}
+
+
+def _target_task(name):
+ def wrap(func):
+ _target_task_methods[name] = func
+ return func
+ return wrap
+
+
+def get_method(method):
+ """Get a target_task_method to pass to a TaskGraphGenerator."""
+ return _target_task_methods[method]
+
+
+@_target_task('try_option_syntax')
+def target_tasks_try_option_syntax(full_task_graph, parameters):
+ """Generate a list of target tasks based on try syntax in
+ parameters['message'] and, for context, the full task graph."""
+ options = try_option_syntax.TryOptionSyntax(parameters['message'], full_task_graph)
+ target_tasks_labels = [t.label for t in full_task_graph.tasks.itervalues()
+ if options.task_matches(t.attributes)]
+
+    # If the developer wants test jobs to be rebuilt N times, we add that value here.
+ if int(options.trigger_tests) > 1:
+ for l in target_tasks_labels:
+ task = full_task_graph[l]
+ if 'unittest_suite' in task.attributes:
+ task.attributes['task_duplicates'] = options.trigger_tests
+
+ # Add notifications here as well
+ if options.notifications:
+ for task in full_task_graph:
+ owner = parameters.get('owner')
+ routes = task.task.setdefault('routes', [])
+ if options.notifications == 'all':
+ routes.append("notify.email.{}.on-any".format(owner))
+ elif options.notifications == 'failure':
+ routes.append("notify.email.{}.on-failed".format(owner))
+ routes.append("notify.email.{}.on-exception".format(owner))
+
+ return target_tasks_labels
+
+
+@_target_task('default')
+def target_tasks_default(full_task_graph, parameters):
+ """Target the tasks which have indicated they should be run on this project
+ via the `run_on_projects` attributes."""
+ def filter(task):
+        run_on_projects = set(task.attributes.get('run_on_projects', []))
+ if 'all' in run_on_projects:
+ return True
+ project = parameters['project']
+ if 'integration' in run_on_projects:
+ if project in INTEGRATION_PROJECTS:
+ return True
+ if 'release' in run_on_projects:
+ if project in RELEASE_PROJECTS:
+ return True
+ return project in run_on_projects
+ return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
+
+
+@_target_task('ash_tasks')
+def target_tasks_ash(full_task_graph, parameters):
+ """Target tasks that only run on the ash branch."""
+ def filter(task):
+ platform = task.attributes.get('build_platform')
+ # only select platforms
+ if platform not in ('linux64', 'linux64-asan', 'linux64-pgo'):
+ return False
+ # and none of this linux64-asan/debug stuff
+ if platform == 'linux64-asan' and task.attributes['build_type'] == 'debug':
+ return False
+        # no non-e10s tests
+        if task.attributes.get('unittest_suite') or task.attributes.get('talos_suite'):
+ if not task.attributes.get('e10s'):
+ return False
+ # don't upload symbols
+ if task.attributes['kind'] == 'upload-symbols':
+ return False
+ return True
+ return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
+
+
+@_target_task('cedar_tasks')
+def target_tasks_cedar(full_task_graph, parameters):
+ """Target tasks that only run on the cedar branch."""
+ def filter(task):
+ platform = task.attributes.get('build_platform')
+ # only select platforms
+ if platform not in ['linux64']:
+ return False
+ if task.attributes.get('unittest_suite'):
+ if not (task.attributes['unittest_suite'].startswith('mochitest')
+ or 'xpcshell' in task.attributes['unittest_suite']):
+ return False
+ return True
+ return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
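
Adding a new target-task method only requires registering a function with the `_target_task` decorator; the `linux_only` method below is a hypothetical illustration, not one of the built-ins:

    from taskgraph.target_tasks import _target_task, get_method

    @_target_task('linux_only')
    def target_tasks_linux_only(full_task_graph, parameters):
        """Target only tasks built for linux64 (a hypothetical filter)."""
        return [l for l, t in full_task_graph.tasks.iteritems()
                if t.attributes.get('build_platform') == 'linux64']

    method = get_method('linux_only')   # as retrieved by TaskGraphGenerator
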
diff --git a/taskcluster/taskgraph/task/__init__.py b/taskcluster/taskgraph/task/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/task/__init__.py
diff --git a/taskcluster/taskgraph/task/base.py b/taskcluster/taskgraph/task/base.py
new file mode 100644
index 000000000..2d9cbf5d9
--- /dev/null
+++ b/taskcluster/taskgraph/task/base.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import abc
+
+
+class Task(object):
+ """
+ Representation of a task in a TaskGraph. Each Task has, at creation:
+
+ - kind: the name of the task kind
+    - label: the label for this task
+ - attributes: a dictionary of attributes for this task (used for filtering)
+ - task: the task definition (JSON-able dictionary)
+
+ And later, as the task-graph processing proceeds:
+
+ - task_id -- TaskCluster taskId under which this task will be created
+ - optimized -- true if this task need not be performed
+
+ A kind represents a collection of tasks that share common characteristics.
+    For example, all build jobs. Each instance of a kind is initialized with a
+ path from which it draws its task configuration. The instance is free to
+ store as much local state as it needs.
+ """
+ __metaclass__ = abc.ABCMeta
+
+ def __init__(self, kind, label, attributes, task):
+ self.kind = kind
+ self.label = label
+ self.attributes = attributes
+ self.task = task
+
+ self.task_id = None
+ self.optimized = False
+
+ self.attributes['kind'] = kind
+
+ def __eq__(self, other):
+ return self.kind == other.kind and \
+ self.label == other.label and \
+ self.attributes == other.attributes and \
+ self.task == other.task and \
+ self.task_id == other.task_id
+
+ @classmethod
+ @abc.abstractmethod
+ def load_tasks(cls, kind, path, config, parameters, loaded_tasks):
+ """
+ Load the tasks for a given kind.
+
+        The `kind` is the name of the kind; the configuration for that kind
+        named this class as its implementation.
+
+ The `path` is the path to the configuration directory for the kind. This
+ can be used to load extra data, templates, etc.
+
+ The `parameters` give details on which to base the task generation.
+ See `taskcluster/docs/parameters.rst` for details.
+
+ At the time this method is called, all kinds on which this kind depends
+        (that is, those specified in the `kind-dependencies` key in `self.config`)
+ have already loaded their tasks, and those tasks are available in
+ the list `loaded_tasks`.
+
+ The return value is a list of Task instances.
+ """
+
+ @abc.abstractmethod
+ def get_dependencies(self, taskgraph):
+ """
+ Get the set of task labels this task depends on, by querying the full
+ task set, given as `taskgraph`.
+
+ Returns a list of (task_label, dependency_name) pairs describing the
+ dependencies.
+ """
+
+ def optimize(self, params):
+ """
+ Determine whether this task can be optimized, and if it can, what taskId
+ it should be replaced with.
+
+ The return value is a tuple `(optimized, taskId)`. If `optimized` is
+ true, then the task will be optimized (in other words, not included in
+        the task graph). If the second element is a taskId, then any
+        dependencies on this task will instead depend on that taskId. It is an
+ error to return no taskId for a task on which other tasks depend.
+
+ The default never optimizes.
+ """
+ return False, None
+
+ @classmethod
+ def from_json(cls, task_dict):
+ """
+ Given a data structure as produced by taskgraph.to_json, re-construct
+ the original Task object. This is used to "resume" the task-graph
+ generation process, for example in Action tasks.
+ """
+ return cls(
+ kind=task_dict['attributes']['kind'],
+ label=task_dict['label'],
+ attributes=task_dict['attributes'],
+ task=task_dict['task'])
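
A minimal concrete subclass satisfying the abstract contract above; this is purely illustrative, not an actual kind implementation:

    from taskgraph.task.base import Task

    class NullTask(Task):
        @classmethod
        def load_tasks(cls, kind, path, config, parameters, loaded_tasks):
            # One hard-coded task; a real kind would read configuration and
            # templates from `path`.
            return [cls(kind, '{}-example'.format(kind),
                        attributes={}, task={'payload': {}})]

        def get_dependencies(self, taskgraph):
            return []   # (label, name) pairs; this task has none

        # optimize() is inherited from Task: never optimized.
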
diff --git a/taskcluster/taskgraph/task/docker_image.py b/taskcluster/taskgraph/task/docker_image.py
new file mode 100644
index 000000000..fd67c4832
--- /dev/null
+++ b/taskcluster/taskgraph/task/docker_image.py
@@ -0,0 +1,130 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import json
+import os
+import urllib2
+
+from . import base
+from taskgraph.util.docker import (
+ docker_image,
+ generate_context_hash,
+ INDEX_PREFIX,
+)
+from taskgraph.util.templates import Templates
+
+logger = logging.getLogger(__name__)
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..'))
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
+
+
+class DockerImageTask(base.Task):
+
+ def __init__(self, *args, **kwargs):
+ self.index_paths = kwargs.pop('index_paths')
+ super(DockerImageTask, self).__init__(*args, **kwargs)
+
+ def __eq__(self, other):
+ return super(DockerImageTask, self).__eq__(other) and \
+ self.index_paths == other.index_paths
+
+ @classmethod
+ def load_tasks(cls, kind, path, config, params, loaded_tasks):
+ parameters = {
+ 'pushlog_id': params.get('pushlog_id', 0),
+ 'pushdate': params['moz_build_date'],
+ 'pushtime': params['moz_build_date'][8:],
+ 'year': params['moz_build_date'][0:4],
+ 'month': params['moz_build_date'][4:6],
+ 'day': params['moz_build_date'][6:8],
+ 'project': params['project'],
+ 'docker_image': docker_image,
+ 'base_repository': params['base_repository'] or params['head_repository'],
+ 'head_repository': params['head_repository'],
+ 'head_ref': params['head_ref'] or params['head_rev'],
+ 'head_rev': params['head_rev'],
+ 'owner': params['owner'],
+ 'level': params['level'],
+ 'source': '{repo}file/{rev}/taskcluster/ci/docker-image/image.yml'
+ .format(repo=params['head_repository'], rev=params['head_rev']),
+ 'index_image_prefix': INDEX_PREFIX,
+ 'artifact_path': 'public/image.tar.zst',
+ }
+
+ tasks = []
+ templates = Templates(path)
+ for image_name, image_symbol in config['images'].iteritems():
+ context_path = os.path.join('testing', 'docker', image_name)
+ context_hash = generate_context_hash(GECKO, context_path, image_name)
+
+ image_parameters = dict(parameters)
+ image_parameters['image_name'] = image_name
+ image_parameters['context_hash'] = context_hash
+
+ image_task = templates.load('image.yml', image_parameters)
+ attributes = {'image_name': image_name}
+
+ # give each docker image task its own unique treeherder symbol
+ if 'extra' in image_task['task']:
+ image_task['task']['extra']['treeherder']['symbol'] = image_symbol
+
+ # As an optimization, if the context hash exists for a higher level, that
+ # image task ID will be used. The reasoning is that everything eventually
+ # ends up on level 3; if most tasks share a common image for a given
+ # context hash, a worker within Taskcluster does not need to keep a
+ # separate copy of the image per branch.
+ index_paths = ['{}.level-{}.{}.hash.{}'.format(
+ INDEX_PREFIX, level, image_name, context_hash)
+ for level in range(int(params['level']), 4)]
+
+ tasks.append(cls(kind, 'build-docker-image-' + image_name,
+ task=image_task['task'], attributes=attributes,
+ index_paths=index_paths))
+
+ return tasks
+
+ def get_dependencies(self, taskgraph):
+ return []
+
+ def optimize(self, params):
+ for index_path in self.index_paths:
+ try:
+ url = INDEX_URL.format(index_path)
+ existing_task = json.load(urllib2.urlopen(url))
+ # Only return the task ID if the artifact exists for the indexed
+ # task; otherwise move on to the next index path. Trying the remaining
+ # branches covers the case where mozilla-central has an expired
+ # artifact but the current project does not. No task ID is returned
+ # only after every branch has been tried.
+ request = urllib2.Request(
+ ARTIFACT_URL.format(existing_task['taskId'], 'public/image.tar.zst'))
+ request.get_method = lambda: 'HEAD'
+ urllib2.urlopen(request)
+
+ # HEAD success on the artifact is enough
+ return True, existing_task['taskId']
+ except urllib2.HTTPError:
+ pass
+
+ return False, None
+
+ @classmethod
+ def from_json(cls, task_dict):
+ # Generating index_paths for optimization
+ imgMeta = task_dict['task']['extra']['imageMeta']
+ image_name = imgMeta['imageName']
+ context_hash = imgMeta['contextHash']
+ index_paths = ['{}.level-{}.{}.hash.{}'.format(
+ INDEX_PREFIX, level, image_name, context_hash)
+ for level in range(int(imgMeta['level']), 4)]
+ docker_image_task = cls(kind='docker-image',
+ label=task_dict['label'],
+ attributes=task_dict['attributes'],
+ task=task_dict['task'],
+ index_paths=index_paths)
+ return docker_image_task
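To make the optimization above concrete, the index paths cover every level from the current push's level up to level 3. A small sketch of the expansion, with an invented image name and a placeholder hash (the real INDEX_PREFIX value is imported from taskgraph.util.docker; the value below is assumed):

    # Illustrative expansion for a level-1 push of a hypothetical image.
    INDEX_PREFIX = 'docker.images.v2'  # assumed; the real constant is imported
    image_name = 'desktop-build'       # invented image name
    context_hash = 'abcdef123456'      # placeholder hash
    index_paths = ['{}.level-{}.{}.hash.{}'.format(
        INDEX_PREFIX, level, image_name, context_hash)
        for level in range(1, 4)]
    # index_paths[0] == 'docker.images.v2.level-1.desktop-build.hash.abcdef123456'
    # and likewise for levels 2 and 3; optimize() probes each in order.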
diff --git a/taskcluster/taskgraph/task/post_build.py b/taskcluster/taskgraph/task/post_build.py
new file mode 100644
index 000000000..09c76c44a
--- /dev/null
+++ b/taskcluster/taskgraph/task/post_build.py
@@ -0,0 +1,53 @@
+# This Source Code Form is subject to the terms of the Mozilla Public License,
+# v. 2.0. If a copy of the MPL was not distributed with this file, You can
+# obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+
+from . import transform
+from ..util.yaml import load_yaml
+
+logger = logging.getLogger(__name__)
+
+
+class PostBuildTask(transform.TransformTask):
+ """
+ A task implementing a post-build job. These depend on jobs and perform
+ various followup tasks after a build has completed.
+
+ The `only-for-build-platforms` kind configuration, if specified, will limit
+ the build platforms for which a post-build task will be created.
+
+ The `job-template` kind configuration points to a yaml file which will
+ be used to create the input to the transforms. It will have added to it
+ keys `build-label`, the label for the build task, and `build-platform`, its
+ platform.
+ """
+
+ @classmethod
+ def get_inputs(cls, kind, path, config, params, loaded_tasks):
+ if config.get('kind-dependencies', []) != ["build"]:
+ raise Exception("PostBuildTask kinds must depend on builds")
+
+ only_platforms = config.get('only-for-build-platforms')
+ prototype = load_yaml(path, config.get('job-template'))
+
+ for task in loaded_tasks:
+ if task.kind != 'build':
+ continue
+
+ build_platform = task.attributes.get('build_platform')
+ build_type = task.attributes.get('build_type')
+ if not build_platform or not build_type:
+ continue
+ platform = "{}/{}".format(build_platform, build_type)
+ if only_platforms and platform not in only_platforms:
+ continue
+
+ post_task = copy.deepcopy(prototype)
+ post_task['build-label'] = task.label
+ post_task['build-platform'] = platform
+ yield post_task
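For reference, a kind configuration consumed by this class might parse to something like the following Python structure; the values are invented, and the keys match those read by get_inputs above:

    # Hypothetical parsed kind configuration for a post-build kind.
    config = {
        'kind-dependencies': ['build'],
        'only-for-build-platforms': ['linux64/opt'],
        'job-template': 'job-template.yml',
    }
    # get_inputs yields one deep copy of the template per matching build,
    # with 'build-label' and 'build-platform' keys added to each copy.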
diff --git a/taskcluster/taskgraph/task/signing.py b/taskcluster/taskgraph/task/signing.py
new file mode 100644
index 000000000..a2a9ae3d6
--- /dev/null
+++ b/taskcluster/taskgraph/task/signing.py
@@ -0,0 +1,64 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import os
+
+from . import base
+from taskgraph.util.templates import Templates
+
+
+logger = logging.getLogger(__name__)
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..'))
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
+
+
+class SigningTask(base.Task):
+
+ def __init__(self, kind, name, task, attributes):
+ self.unsigned_artifact_label = task['unsigned-task']['label']
+ super(SigningTask, self).__init__(kind, name, task=task['task'],
+ attributes=attributes)
+
+ @classmethod
+ def load_tasks(cls, kind, path, config, params, loaded_tasks):
+ root = os.path.abspath(path)
+
+ tasks = []
+ for filename in config.get('jobs-from', []):
+ templates = Templates(root)
+ jobs = templates.load(filename, {})
+
+ for name, job in jobs.iteritems():
+ for artifact in job['unsigned-task']['artifacts']:
+ url = ARTIFACT_URL.format('<{}>'.format('unsigned-artifact'), artifact)
+ job['task']['payload']['unsignedArtifacts'].append({
+ 'task-reference': url
+ })
+ attributes = job.setdefault('attributes', {})
+ attributes.update({'kind': 'signing'})
+ tasks.append(cls(kind, name, job, attributes=attributes))
+
+ return tasks
+
+ def get_dependencies(self, taskgraph):
+ return [(self.unsigned_artifact_label, 'unsigned-artifact')]
+
+ def optimize(self, params):
+ return False, None
+
+ @classmethod
+ def from_json(cls, task_dict):
+ unsigned_task_label = task_dict['dependencies']['unsigned-artifact']
+ task_dict['unsigned-task'] = {
+ 'label': unsigned_task_label
+ }
+ signing_task = cls(kind='build-signing',
+ name=task_dict['label'],
+ attributes=task_dict['attributes'],
+ task=task_dict)
+ return signing_task
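The shape of the `unsigned-task` structure consumed by load_tasks and __init__ can be pictured as follows; the label and artifact name are placeholders:

    # Sketch of one signing job as load_tasks reads it from the templates.
    job = {
        'unsigned-task': {
            'label': 'build-linux64/opt',                  # placeholder label
            'artifacts': ['public/build/target.tar.bz2'],  # placeholder path
        },
        'task': {
            'payload': {'unsignedArtifacts': []},
        },
    }
    # For this job, load_tasks appends a task-reference of the form
    # https://queue.taskcluster.net/v1/task/<unsigned-artifact>/artifacts/public/build/target.tar.bz2
    # which is later resolved to the taskId of the unsigned-artifact dependency.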
diff --git a/taskcluster/taskgraph/task/test.py b/taskcluster/taskgraph/task/test.py
new file mode 100644
index 000000000..928f32a5a
--- /dev/null
+++ b/taskcluster/taskgraph/task/test.py
@@ -0,0 +1,112 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+
+from . import transform
+from ..util.yaml import load_yaml
+
+logger = logging.getLogger(__name__)
+
+
+class TestTask(transform.TransformTask):
+ """
+ A task implementing a Gecko test.
+ """
+
+ @classmethod
+ def get_inputs(cls, kind, path, config, params, loaded_tasks):
+
+ # the kind on which this one depends
+ if len(config.get('kind-dependencies', [])) != 1:
+ raise Exception("TestTask kinds must have exactly one item in kind-dependencies")
+ dep_kind = config['kind-dependencies'][0]
+
+ # get build tasks, keyed by build platform
+ builds_by_platform = cls.get_builds_by_platform(dep_kind, loaded_tasks)
+
+ # get the test platforms for those build tasks
+ test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
+ test_platforms = cls.get_test_platforms(test_platforms_cfg, builds_by_platform)
+
+ # expand the test sets for each of those platforms
+ test_sets_cfg = load_yaml(path, 'test-sets.yml')
+ test_platforms = cls.expand_tests(test_sets_cfg, test_platforms)
+
+ # load the test descriptions
+ test_descriptions = load_yaml(path, 'tests.yml')
+
+ # generate all tests for all test platforms
+ for test_platform_name, test_platform in test_platforms.iteritems():
+ for test_name in test_platform['test-names']:
+ test = copy.deepcopy(test_descriptions[test_name])
+ test['build-platform'] = test_platform['build-platform']
+ test['test-platform'] = test_platform_name
+ test['build-label'] = test_platform['build-label']
+ test['test-name'] = test_name
+
+ logger.debug("Generating tasks for {} test {} on platform {}".format(
+ kind, test_name, test['test-platform']))
+ yield test
+
+ @classmethod
+ def get_builds_by_platform(cls, dep_kind, loaded_tasks):
+ """Find the build tasks on which tests will depend, keyed by
+ platform/type. Returns a dictionary mapping build platform to task
+ label."""
+ builds_by_platform = {}
+ for task in loaded_tasks:
+ if task.kind != dep_kind:
+ continue
+
+ build_platform = task.attributes.get('build_platform')
+ build_type = task.attributes.get('build_type')
+ if not build_platform or not build_type:
+ continue
+ platform = "{}/{}".format(build_platform, build_type)
+ if platform in builds_by_platform:
+ raise Exception("multiple build jobs for " + platform)
+ builds_by_platform[platform] = task.label
+ return builds_by_platform
+
+ @classmethod
+ def get_test_platforms(cls, test_platforms_cfg, builds_by_platform):
+ """Get the test platforms for which test tasks should be generated,
+ based on the available build platforms. Returns a dictionary mapping
+ test platform to {test-set, build-platform, build-label}."""
+ test_platforms = {}
+ for test_platform, cfg in test_platforms_cfg.iteritems():
+ build_platform = cfg['build-platform']
+ if build_platform not in builds_by_platform:
+ logger.warning(
+ "No build task with platform {}; ignoring test platform {}".format(
+ build_platform, test_platform))
+ continue
+ test_platforms[test_platform] = {
+ 'test-set': cfg['test-set'],
+ 'build-platform': build_platform,
+ 'build-label': builds_by_platform[build_platform],
+ }
+ return test_platforms
+
+ @classmethod
+ def expand_tests(cls, test_sets_cfg, test_platforms):
+ """Expand the test sets in `test_platforms` out to sets of test names.
+ Returns a dictionary like `get_test_platforms`, with an additional
+ `test-names` key for each test platform, containing a set of test
+ names."""
+ rv = {}
+ for test_platform, cfg in test_platforms.iteritems():
+ test_set = cfg['test-set']
+ if test_set not in test_sets_cfg:
+ raise Exception(
+ "Test set '{}' for test platform {} is not defined".format(
+ test_set, test_platform))
+ test_names = test_sets_cfg[test_set]
+ rv[test_platform] = cfg.copy()
+ rv[test_platform]['test-names'] = test_names
+ return rv
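A small worked example of expand_tests, with input data invented for illustration:

    # Invented platform, set, and label names.
    test_sets_cfg = {'common-tests': ['mochitest', 'reftest']}
    test_platforms = {
        'linux64/opt': {
            'test-set': 'common-tests',
            'build-platform': 'linux64/opt',
            'build-label': 'build-linux64/opt',
        },
    }
    expanded = TestTask.expand_tests(test_sets_cfg, test_platforms)
    # expanded['linux64/opt'] now carries 'test-names': ['mochitest', 'reftest']
    # alongside the original test-set, build-platform, and build-label keys.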
diff --git a/taskcluster/taskgraph/task/transform.py b/taskcluster/taskgraph/task/transform.py
new file mode 100644
index 000000000..8183254a0
--- /dev/null
+++ b/taskcluster/taskgraph/task/transform.py
@@ -0,0 +1,109 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import logging
+import itertools
+
+from . import base
+from .. import files_changed
+from ..util.python_path import find_object
+from ..util.templates import merge
+from ..util.yaml import load_yaml
+from ..util.seta import is_low_value_task
+
+from ..transforms.base import TransformSequence, TransformConfig
+
+logger = logging.getLogger(__name__)
+
+
+class TransformTask(base.Task):
+ """
+ Tasks of this class are generated by applying transformations to a sequence
+ of input entities. By default, it gets those inputs from YAML data in the
+ kind directory, but subclasses may override `get_inputs` to produce them in
+ some other way.
+ """
+
+ @classmethod
+ def get_inputs(cls, kind, path, config, params, loaded_tasks):
+ """
+ Get the input elements that will be transformed into tasks. The
+ elements themselves are free-form, and become the input to the first
+ transform.
+
+ By default, this reads jobs from the `jobs` key, or from yaml files
+ named by `jobs-from`. The entities are read from mappings, and the
+ keys to those mappings are added in the `name` key of each entity.
+
+ If there is a `job-defaults` config, then every job is merged with it.
+ This provides a simple way to set default values for all jobs of a
+ kind. More complex defaults should be implemented with custom
+ transforms.
+
+ This method can be overridden in subclasses that need to perform more
+ complex calculations to generate the list of inputs.
+ """
+ def jobs():
+ defaults = config.get('job-defaults')
+ jobs = config.get('jobs', {}).iteritems()
+ jobs_from = itertools.chain.from_iterable(
+ load_yaml(path, filename).iteritems()
+ for filename in config.get('jobs-from', {}))
+ for name, job in itertools.chain(jobs, jobs_from):
+ if defaults:
+ job = merge(defaults, job)
+ yield name, job
+
+ for name, job in jobs():
+ job['name'] = name
+ logger.debug("Generating tasks for {} {}".format(kind, name))
+ yield job
+
+ @classmethod
+ def load_tasks(cls, kind, path, config, params, loaded_tasks):
+ inputs = cls.get_inputs(kind, path, config, params, loaded_tasks)
+
+ transforms = TransformSequence()
+ for xform_path in config['transforms']:
+ transform = find_object(xform_path)
+ transforms.add(transform)
+
+ # perform the transformations
+ trans_config = TransformConfig(kind, path, config, params)
+ tasks = [cls(kind, t) for t in transforms(trans_config, inputs)]
+ return tasks
+
+ def __init__(self, kind, task):
+ self.dependencies = task['dependencies']
+ self.when = task['when']
+ super(TransformTask, self).__init__(kind, task['label'],
+ task['attributes'], task['task'])
+
+ def get_dependencies(self, taskgraph):
+ return [(label, name) for name, label in self.dependencies.items()]
+
+ def optimize(self, params):
+ if 'files-changed' in self.when:
+ changed = files_changed.check(
+ params, self.when['files-changed'])
+ if not changed:
+ logger.debug('no files found matching a pattern in `when.files-changed` for ' +
+ self.label)
+ return True, None
+
+ # Return (False, None) for high-value tasks, leaving them in the graph;
+ # low-value tasks return (True, None) and are optimized away.
+ if is_low_value_task(self.label, params.get('project')):
+ # Always optimize away low-value tasks
+ return True, None
+ else:
+ return False, None
+
+ @classmethod
+ def from_json(cls, task_dict):
+ # when reading back from JSON, we lose the "when" information
+ task_dict['when'] = {}
+ return cls(task_dict['attributes']['kind'], task_dict)
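A transform, as consumed by load_tasks above, is just a callable taking the transform config and an iterable of task definitions and yielding modified definitions. A minimal hypothetical example, assuming TransformConfig exposes the parameters as `config.params` (that class is defined outside this hunk):

    # Hypothetical transform: stamp each task with the project parameter.
    def add_project_attribute(config, tasks):
        for task in tasks:
            attributes = task.setdefault('attributes', {})
            attributes['project'] = config.params['project']  # assumed attribute
            yield task

Such a function would be named by its Python path in the kind's `transforms` list and picked up by find_object.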
diff --git a/taskcluster/taskgraph/taskgraph.py b/taskcluster/taskgraph/taskgraph.py
new file mode 100644
index 000000000..7736745ef
--- /dev/null
+++ b/taskcluster/taskgraph/taskgraph.py
@@ -0,0 +1,82 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+
+from .graph import Graph
+from .util.python_path import find_object
+
+TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task/"
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
+
+
+class TaskGraph(object):
+ """
+ Representation of a task graph.
+
+ A task graph is a combination of a Graph and a dictionary of tasks indexed
+ by label. TaskGraph instances should be treated as immutable.
+ """
+
+ def __init__(self, tasks, graph):
+ assert set(tasks) == graph.nodes
+ self.tasks = tasks
+ self.graph = graph
+
+ def to_json(self):
+ "Return a JSON-able object representing the task graph, as documented"
+ named_links_dict = self.graph.named_links_dict()
+ # this dictionary may be keyed by label or by taskid, so let's just call it 'key'
+ tasks = {}
+ for key in self.graph.visit_postorder():
+ task = self.tasks[key]
+ implementation = task.__class__.__module__ + ":" + task.__class__.__name__
+ task_json = {
+ 'label': task.label,
+ 'attributes': task.attributes,
+ 'dependencies': named_links_dict.get(key, {}),
+ 'task': task.task,
+ 'kind_implementation': implementation
+ }
+ if task.task_id:
+ task_json['task_id'] = task.task_id
+ tasks[key] = task_json
+ return tasks
+
+ def __getitem__(self, label):
+ "Get a task by label"
+ return self.tasks[label]
+
+ def __iter__(self):
+ "Iterate over tasks in undefined order"
+ return self.tasks.itervalues()
+
+ def __repr__(self):
+ return "<TaskGraph graph={!r} tasks={!r}>".format(self.graph, self.tasks)
+
+ def __eq__(self, other):
+ return self.tasks == other.tasks and self.graph == other.graph
+
+ @classmethod
+ def from_json(cls, tasks_dict):
+ """
+ Reconstruct a TaskGraph from the dictionary produced by to_json,
+ returning both the task dictionary and the TaskGraph instance.
+ """
+ tasks = {}
+ edges = set()
+ for key, value in tasks_dict.iteritems():
+ # We get the implementation from JSON
+ implementation = value['kind_implementation']
+ # Loading the module and creating a Task from a dictionary
+ task_kind = find_object(implementation)
+ tasks[key] = task_kind.from_json(value)
+ if 'task_id' in value:
+ tasks[key].task_id = value['task_id']
+ for depname, dep in value['dependencies'].iteritems():
+ edges.add((key, dep, depname))
+ task_graph = cls(tasks, Graph(set(tasks), edges))
+ return tasks, task_graph
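In miniature, constructing and round-tripping a graph looks like the following sketch, using the TestTask helper added in taskcluster/taskgraph/test/util.py later in this patch:

    from taskgraph.graph import Graph
    from taskgraph.taskgraph import TaskGraph
    from taskgraph.test.util import TestTask

    tasks = {
        'a': TestTask(label='a'),
        'b': TestTask(label='b'),
    }
    graph = Graph(nodes={'a', 'b'}, edges={('a', 'b', 'build')})
    tg = TaskGraph(tasks, graph)
    # from_json returns both the task dictionary and the rebuilt TaskGraph.
    tasks_by_key, rebuilt = TaskGraph.from_json(tg.to_json())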
diff --git a/taskcluster/taskgraph/test/__init__.py b/taskcluster/taskgraph/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/test/__init__.py
diff --git a/taskcluster/taskgraph/test/automationrelevance.json b/taskcluster/taskgraph/test/automationrelevance.json
new file mode 100644
index 000000000..8adfc446d
--- /dev/null
+++ b/taskcluster/taskgraph/test/automationrelevance.json
@@ -0,0 +1,425 @@
+{
+ "changesets": [
+ {
+ "author": "James Long <longster@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1300866",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300866"
+ }
+ ],
+ "date": [
+ 1473196655.0,
+ 14400
+ ],
+ "desc": "Bug 1300866 - expose devtools require to new debugger r=jlast,bgrins",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/debugger/new/index.html"
+ ],
+ "node": "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "parents": [
+ "37c9349b4e8167a61b08b7e119c21ea177b98942"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312890,
+ "reviewers": [
+ {
+ "name": "jlast",
+ "revset": "reviewer(jlast)"
+ },
+ {
+ "name": "bgrins",
+ "revset": "reviewer(bgrins)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Wes Kocher <wkocher@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [],
+ "date": [
+ 1473208638.0,
+ 25200
+ ],
+ "desc": "Merge m-c to fx-team, a=merge",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "taskcluster/scripts/builder/build-l10n.sh"
+ ],
+ "node": "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "parents": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "91c2b9d5c1354ca79e5b174591dbb03b32b15bbf"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312891,
+ "reviewers": [
+ {
+ "name": "merge",
+ "revset": "reviewer(merge)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Towkir Ahmed <towkir17@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1296648",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1296648"
+ }
+ ],
+ "date": [
+ 1472957580.0,
+ 14400
+ ],
+ "desc": "Bug 1296648 - Fix direction of .ruleview-expander.theme-twisty in RTL locales. r=ntim",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/themes/rules.css"
+ ],
+ "node": "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "parents": [
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312892,
+ "reviewers": [
+ {
+ "name": "ntim",
+ "revset": "reviewer(ntim)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Oriol <oriol-bugzilla@hotmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1300336",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300336"
+ }
+ ],
+ "date": [
+ 1472921160.0,
+ 14400
+ ],
+ "desc": "Bug 1300336 - Allow pseudo-arrays to have a length property. r=fitzgen",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/webconsole/test/browser_webconsole_output_06.js",
+ "devtools/server/actors/object.js"
+ ],
+ "node": "99c542fa43a72ee863c813b5624048d1b443549b",
+ "parents": [
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312893,
+ "reviewers": [
+ {
+ "name": "fitzgen",
+ "revset": "reviewer(fitzgen)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Ruturaj Vartak <ruturaj@gmail.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1295010",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
+ }
+ ],
+ "date": [
+ 1472854020.0,
+ -7200
+ ],
+ "desc": "Bug 1295010 - Don't move the eyedropper to the out of browser window by keyboard navigation. r=pbro\n\nMozReview-Commit-ID: vBwmSxVNXK",
+ "extra": {
+ "amend_source": "6885024ef00cfa33d73c59dc03c48ebcda9ccbdd",
+ "branch": "default",
+ "histedit_source": "c43167f0a7cbe9f4c733b15da726e5150a9529ba",
+ "rebase_source": "b74df421630fc46dab6b6cc026bf3e0ae6b4a651"
+ },
+ "files": [
+ "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
+ "devtools/client/inspector/test/head.js",
+ "devtools/server/actors/highlighters/eye-dropper.js"
+ ],
+ "node": "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "parents": [
+ "99c542fa43a72ee863c813b5624048d1b443549b"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312894,
+ "reviewers": [
+ {
+ "name": "pbro",
+ "revset": "reviewer(pbro)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Matteo Ferretti <mferretti@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1299154",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1299154"
+ }
+ ],
+ "date": [
+ 1472629906.0,
+ -7200
+ ],
+ "desc": "Bug 1299154 - added Set/GetOverrideDPPX to restorefromHistory; r=mstange\n\nMozReview-Commit-ID: AsyAcG3Igbn\n",
+ "extra": {
+ "branch": "default",
+ "committer": "Matteo Ferretti <mferretti@mozilla.com> 1473236511 -7200"
+ },
+ "files": [
+ "docshell/base/nsDocShell.cpp",
+ "dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html"
+ ],
+ "node": "541c9086c0f27fba60beecc9bc94543103895c86",
+ "parents": [
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312895,
+ "reviewers": [
+ {
+ "name": "mstange",
+ "revset": "reviewer(mstange)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Patrick Brosset <pbrosset@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [
+ {
+ "no": "1295010",
+ "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
+ }
+ ],
+ "date": [
+ 1473239449.0,
+ -7200
+ ],
+ "desc": "Bug 1295010 - Removed testActor from highlighterHelper in inspector tests; r=me\n\nMozReview-Commit-ID: GMksl81iGcp",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [
+ "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
+ "devtools/client/inspector/test/head.js"
+ ],
+ "node": "041a925171e431bf51fb50193ab19d156088c89a",
+ "parents": [
+ "541c9086c0f27fba60beecc9bc94543103895c86"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312896,
+ "reviewers": [
+ {
+ "name": "me",
+ "revset": "reviewer(me)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ },
+ {
+ "author": "Carsten \"Tomcat\" Book <cbook@mozilla.com>",
+ "backsoutnodes": [],
+ "bugs": [],
+ "date": [
+ 1473261233.0,
+ -7200
+ ],
+ "desc": "merge fx-team to mozilla-central a=merge",
+ "extra": {
+ "branch": "default"
+ },
+ "files": [],
+ "node": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "parents": [
+ "3d0b41fdd93bd8233745eadb4e0358e385bf2cb9",
+ "041a925171e431bf51fb50193ab19d156088c89a"
+ ],
+ "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "pushdate": [
+ 1473261248,
+ 0
+ ],
+ "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
+ "pushid": 30664,
+ "pushnodes": [
+ "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
+ "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
+ "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
+ "99c542fa43a72ee863c813b5624048d1b443549b",
+ "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
+ "541c9086c0f27fba60beecc9bc94543103895c86",
+ "041a925171e431bf51fb50193ab19d156088c89a",
+ "a14f88a9af7a59e677478694bafd9375ac53683e"
+ ],
+ "pushuser": "cbook@mozilla.com",
+ "rev": 312897,
+ "reviewers": [
+ {
+ "name": "merge",
+ "revset": "reviewer(merge)"
+ }
+ ],
+ "treeherderrepo": "mozilla-central",
+ "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
+ }
+ ],
+ "visible": true
+}
+
diff --git a/taskcluster/taskgraph/test/test_create.py b/taskcluster/taskgraph/test/test_create.py
new file mode 100644
index 000000000..b8da3aec0
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_create.py
@@ -0,0 +1,76 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+import os
+
+from .. import create
+from ..graph import Graph
+from ..taskgraph import TaskGraph
+from .util import TestTask
+
+from mozunit import main
+
+
+class TestCreate(unittest.TestCase):
+
+ def setUp(self):
+ self.old_task_id = os.environ.get('TASK_ID')
+ if 'TASK_ID' in os.environ:
+ del os.environ['TASK_ID']
+ self.created_tasks = {}
+ self.old_create_task = create._create_task
+ create._create_task = self.fake_create_task
+
+ def tearDown(self):
+ create._create_task = self.old_create_task
+ if self.old_task_id:
+ os.environ['TASK_ID'] = self.old_task_id
+ elif 'TASK_ID' in os.environ:
+ del os.environ['TASK_ID']
+
+ def fake_create_task(self, session, task_id, label, task_def):
+ self.created_tasks[task_id] = task_def
+
+ def test_create_tasks(self):
+ tasks = {
+ 'tid-a': TestTask(label='a', task={'payload': 'hello world'}),
+ 'tid-b': TestTask(label='b', task={'payload': 'hello world'}),
+ }
+ label_to_taskid = {'a': 'tid-a', 'b': 'tid-b'}
+ graph = Graph(nodes={'tid-a', 'tid-b'}, edges={('tid-a', 'tid-b', 'edge')})
+ taskgraph = TaskGraph(tasks, graph)
+
+ create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
+
+ for tid, task in self.created_tasks.iteritems():
+ self.assertEqual(task['payload'], 'hello world')
+ self.assertEqual(task['schedulerId'], 'gecko-level-4')
+ # make sure the dependencies exist, at least
+ for depid in task.get('dependencies', []):
+ if depid == 'decisiontask':
+ # Don't look for decisiontask here
+ continue
+ self.assertIn(depid, self.created_tasks)
+
+ def test_create_task_without_dependencies(self):
+ "a task with no dependencies depends on the decision task"
+ os.environ['TASK_ID'] = 'decisiontask'
+ tasks = {
+ 'tid-a': TestTask(label='a', task={'payload': 'hello world'}),
+ }
+ label_to_taskid = {'a': 'tid-a'}
+ graph = Graph(nodes={'tid-a'}, edges=set())
+ taskgraph = TaskGraph(tasks, graph)
+
+ create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
+
+ for tid, task in self.created_tasks.iteritems():
+ self.assertEqual(task.get('dependencies'), [os.environ['TASK_ID']])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_decision.py b/taskcluster/taskgraph/test/test_decision.py
new file mode 100644
index 000000000..364965206
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_decision.py
@@ -0,0 +1,78 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import json
+import yaml
+import shutil
+import unittest
+import tempfile
+
+from .. import decision
+from ..graph import Graph
+from ..taskgraph import TaskGraph
+from .util import TestTask
+from mozunit import main
+
+
+class TestDecision(unittest.TestCase):
+
+ def test_taskgraph_to_json(self):
+ tasks = {
+ 'a': TestTask(label='a', attributes={'attr': 'a-task'}),
+ 'b': TestTask(label='b', task={'task': 'def'}),
+ }
+ graph = Graph(nodes=set('ab'), edges={('a', 'b', 'edgelabel')})
+ taskgraph = TaskGraph(tasks, graph)
+
+ res = taskgraph.to_json()
+
+ self.assertEqual(res, {
+ 'a': {
+ 'label': 'a',
+ 'attributes': {'attr': 'a-task', 'kind': 'test'},
+ 'task': {},
+ 'dependencies': {'edgelabel': 'b'},
+ 'kind_implementation': 'taskgraph.test.util:TestTask',
+ },
+ 'b': {
+ 'label': 'b',
+ 'attributes': {'kind': 'test'},
+ 'task': {'task': 'def'},
+ 'dependencies': {},
+ 'kind_implementation': 'taskgraph.test.util:TestTask',
+ }
+ })
+
+ def test_write_artifact_json(self):
+ data = [{'some': 'data'}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.json", data)
+ with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
+ self.assertEqual(json.load(f), data)
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = 'artifacts'
+
+ def test_write_artifact_yml(self):
+ data = [{'some': 'data'}]
+ tmpdir = tempfile.mkdtemp()
+ try:
+ decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
+ decision.write_artifact("artifact.yml", data)
+ with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.yml")) as f:
+ self.assertEqual(yaml.safe_load(f), data)
+ finally:
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ decision.ARTIFACTS_DIR = 'artifacts'
+
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_files_changed.py b/taskcluster/taskgraph/test/test_files_changed.py
new file mode 100644
index 000000000..0e3366b3c
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_files_changed.py
@@ -0,0 +1,73 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import os
+import unittest
+
+from .. import files_changed
+
+PARAMS = {
+ 'head_repository': 'https://hg.mozilla.org/mozilla-central',
+ 'head_rev': 'a14f88a9af7a',
+}
+
+FILES_CHANGED = [
+ 'devtools/client/debugger/new/index.html',
+ 'devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js',
+ 'devtools/client/inspector/test/head.js',
+ 'devtools/client/themes/rules.css',
+ 'devtools/client/webconsole/test/browser_webconsole_output_06.js',
+ 'devtools/server/actors/highlighters/eye-dropper.js',
+ 'devtools/server/actors/object.js',
+ 'docshell/base/nsDocShell.cpp',
+ 'dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html',
+ 'taskcluster/scripts/builder/build-l10n.sh',
+]
+
+
+class FakeResponse:
+
+ def json(self):
+ with open(os.path.join(os.path.dirname(__file__), 'automationrelevance.json')) as f:
+ return json.load(f)
+
+
+class TestGetChangedFiles(unittest.TestCase):
+
+ def setUp(self):
+ files_changed._cache.clear()
+ self.old_get = files_changed.requests.get
+
+ def fake_get(url, **kwargs):
+ return FakeResponse()
+ files_changed.requests.get = fake_get
+
+ def tearDown(self):
+ files_changed.requests.get = self.old_get
+
+ def test_get_changed_files(self):
+ """Get_changed_files correctly gets the list of changed files in a push.
+ This tests against the production hg.mozilla.org so that it will detect
+ any changes in the format of the returned data."""
+ self.assertEqual(
+ sorted(files_changed.get_changed_files(PARAMS['head_repository'], PARAMS['head_rev'])),
+ FILES_CHANGED)
+
+
+class TestCheck(unittest.TestCase):
+
+ def setUp(self):
+ files_changed._cache[PARAMS['head_repository'], PARAMS['head_rev']] = FILES_CHANGED
+
+ def test_check_no_params(self):
+ self.assertTrue(files_changed.check({}, ["ignored"]))
+
+ def test_check_no_match(self):
+ self.assertFalse(files_changed.check(PARAMS, ["nosuch/**"]))
+
+ def test_check_match(self):
+ self.assertTrue(files_changed.check(PARAMS, ["devtools/**"]))
diff --git a/taskcluster/taskgraph/test/test_generator.py b/taskcluster/taskgraph/test/test_generator.py
new file mode 100644
index 000000000..f1b466e4d
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_generator.py
@@ -0,0 +1,129 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..generator import TaskGraphGenerator, Kind
+from .. import graph
+from ..task import base
+from mozunit import main
+
+
+class FakeTask(base.Task):
+
+ def __init__(self, **kwargs):
+ self.i = kwargs.pop('i')
+ super(FakeTask, self).__init__(**kwargs)
+
+ @classmethod
+ def load_tasks(cls, kind, path, config, parameters, loaded_tasks):
+ return [cls(kind=kind,
+ label='{}-t-{}'.format(kind, i),
+ attributes={'tasknum': str(i)},
+ task={},
+ i=i)
+ for i in range(3)]
+
+ def get_dependencies(self, full_task_set):
+ i = self.i
+ if i > 0:
+ return [('{}-t-{}'.format(self.kind, i - 1), 'prev')]
+ else:
+ return []
+
+ def optimize(self, params):
+ return False, None
+
+
+class FakeKind(Kind):
+
+ def _get_impl_class(self):
+ return FakeTask
+
+ def load_tasks(self, parameters, loaded_tasks):
+ FakeKind.loaded_kinds.append(self.name)
+ return super(FakeKind, self).load_tasks(parameters, loaded_tasks)
+
+
+class WithFakeKind(TaskGraphGenerator):
+
+ def _load_kinds(self):
+ for kind_name, deps in self.parameters['kinds']:
+ yield FakeKind(
+ kind_name, '/fake',
+ {'kind-dependencies': deps} if deps else {})
+
+
+class TestGenerator(unittest.TestCase):
+
+ def maketgg(self, target_tasks=None, kinds=[('fake', [])]):
+ FakeKind.loaded_kinds = []
+ self.target_tasks = target_tasks or []
+
+ def target_tasks_method(full_task_graph, parameters):
+ return self.target_tasks
+ return WithFakeKind('/root', {'kinds': kinds}, target_tasks_method)
+
+ def test_kind_ordering(self):
+ "When task kinds depend on each other, they are loaded in postorder"
+ self.tgg = self.maketgg(kinds=[
+ ('fake3', ['fake2', 'fake1']),
+ ('fake2', ['fake1']),
+ ('fake1', []),
+ ])
+ self.tgg._run_until('full_task_set')
+ self.assertEqual(FakeKind.loaded_kinds, ['fake1', 'fake2', 'fake3'])
+
+ def test_full_task_set(self):
+ "The full_task_set property has all tasks"
+ self.tgg = self.maketgg()
+ self.assertEqual(self.tgg.full_task_set.graph,
+ graph.Graph({'fake-t-0', 'fake-t-1', 'fake-t-2'}, set()))
+ self.assertEqual(sorted(self.tgg.full_task_set.tasks.keys()),
+ sorted(['fake-t-0', 'fake-t-1', 'fake-t-2']))
+
+ def test_full_task_graph(self):
+ "The full_task_graph property has all tasks, and links"
+ self.tgg = self.maketgg()
+ self.assertEqual(self.tgg.full_task_graph.graph,
+ graph.Graph({'fake-t-0', 'fake-t-1', 'fake-t-2'},
+ {
+ ('fake-t-1', 'fake-t-0', 'prev'),
+ ('fake-t-2', 'fake-t-1', 'prev'),
+ }))
+ self.assertEqual(sorted(self.tgg.full_task_graph.tasks.keys()),
+ sorted(['fake-t-0', 'fake-t-1', 'fake-t-2']))
+
+ def test_target_task_set(self):
+ "The target_task_set property has the targeted tasks"
+ self.tgg = self.maketgg(['fake-t-1'])
+ self.assertEqual(self.tgg.target_task_set.graph,
+ graph.Graph({'fake-t-1'}, set()))
+ self.assertEqual(self.tgg.target_task_set.tasks.keys(),
+ ['fake-t-1'])
+
+ def test_target_task_graph(self):
+ "The target_task_graph property has the targeted tasks and deps"
+ self.tgg = self.maketgg(['fake-t-1'])
+ self.assertEqual(self.tgg.target_task_graph.graph,
+ graph.Graph({'fake-t-0', 'fake-t-1'},
+ {('fake-t-1', 'fake-t-0', 'prev')}))
+ self.assertEqual(sorted(self.tgg.target_task_graph.tasks.keys()),
+ sorted(['fake-t-0', 'fake-t-1']))
+
+ def test_optimized_task_graph(self):
+ "The optimized task graph contains task ids"
+ self.tgg = self.maketgg(['fake-t-2'])
+ tid = self.tgg.label_to_taskid
+ self.assertEqual(
+ self.tgg.optimized_task_graph.graph,
+ graph.Graph({tid['fake-t-0'], tid['fake-t-1'], tid['fake-t-2']}, {
+ (tid['fake-t-1'], tid['fake-t-0'], 'prev'),
+ (tid['fake-t-2'], tid['fake-t-1'], 'prev'),
+ }))
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_graph.py b/taskcluster/taskgraph/test/test_graph.py
new file mode 100644
index 000000000..5c4c950a7
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_graph.py
@@ -0,0 +1,157 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..graph import Graph
+from mozunit import main
+
+
+class TestGraph(unittest.TestCase):
+
+ tree = Graph(set(['a', 'b', 'c', 'd', 'e', 'f', 'g']), {
+ ('a', 'b', 'L'),
+ ('a', 'c', 'L'),
+ ('b', 'd', 'K'),
+ ('b', 'e', 'K'),
+ ('c', 'f', 'N'),
+ ('c', 'g', 'N'),
+ })
+
+ linear = Graph(set(['1', '2', '3', '4']), {
+ ('1', '2', 'L'),
+ ('2', '3', 'L'),
+ ('3', '4', 'L'),
+ })
+
+ diamonds = Graph(set(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J']),
+ set(tuple(x) for x in
+ 'AFL ADL BDL BEL CEL CHL DFL DGL EGL EHL FIL GIL GJL HJL'.split()
+ ))
+
+ multi_edges = Graph(set(['1', '2', '3', '4']), {
+ ('2', '1', 'red'),
+ ('2', '1', 'blue'),
+ ('3', '1', 'red'),
+ ('3', '2', 'blue'),
+ ('3', '2', 'green'),
+ ('4', '3', 'green'),
+ })
+
+ disjoint = Graph(set(['1', '2', '3', '4', 'α', 'β', 'γ']), {
+ ('2', '1', 'red'),
+ ('3', '1', 'red'),
+ ('3', '2', 'green'),
+ ('4', '3', 'green'),
+ ('α', 'β', 'πράσινο'),
+ ('β', 'γ', 'κόκκινο'),
+ ('α', 'γ', 'μπλε'),
+ })
+
+ def test_transitive_closure_empty(self):
+ "transitive closure of an empty set is an empty graph"
+ g = Graph(set(['a', 'b', 'c']), {('a', 'b', 'L'), ('a', 'c', 'L')})
+ self.assertEqual(g.transitive_closure(set()),
+ Graph(set(), set()))
+
+ def test_transitive_closure_disjoint(self):
+ "transitive closure of a disjoint set is a subset"
+ g = Graph(set(['a', 'b', 'c']), set())
+ self.assertEqual(g.transitive_closure(set(['a', 'c'])),
+ Graph(set(['a', 'c']), set()))
+
+ def test_transitive_closure_trees(self):
+ "transitive closure of a tree, at two non-root nodes, is the two subtrees"
+ self.assertEqual(self.tree.transitive_closure(set(['b', 'c'])),
+ Graph(set(['b', 'c', 'd', 'e', 'f', 'g']), {
+ ('b', 'd', 'K'),
+ ('b', 'e', 'K'),
+ ('c', 'f', 'N'),
+ ('c', 'g', 'N'),
+ }))
+
+ def test_transitive_closure_multi_edges(self):
+ "transitive closure of a tree with multiple edges between nodes keeps those edges"
+ self.assertEqual(self.multi_edges.transitive_closure(set(['3'])),
+ Graph(set(['1', '2', '3']), {
+ ('2', '1', 'red'),
+ ('2', '1', 'blue'),
+ ('3', '1', 'red'),
+ ('3', '2', 'blue'),
+ ('3', '2', 'green'),
+ }))
+
+ def test_transitive_closure_disjoint_edges(self):
+ "transitive closure of a disjoint graph keeps those edges"
+ self.assertEqual(self.disjoint.transitive_closure(set(['3', 'β'])),
+ Graph(set(['1', '2', '3', 'β', 'γ']), {
+ ('2', '1', 'red'),
+ ('3', '1', 'red'),
+ ('3', '2', 'green'),
+ ('β', 'γ', 'κόκκινο'),
+ }))
+
+ def test_transitive_closure_linear(self):
+ "transitive closure of a linear graph includes all nodes in the line"
+ self.assertEqual(self.linear.transitive_closure(set(['1'])), self.linear)
+
+ def test_visit_postorder_empty(self):
+ "postorder visit of an empty graph is empty"
+ self.assertEqual(list(Graph(set(), set()).visit_postorder()), [])
+
+ def assert_postorder(self, seq, all_nodes):
+ seen = set()
+ for e in seq:
+ for l, r, n in self.tree.edges:
+ if l == e:
+ self.assertTrue(r in seen)
+ seen.add(e)
+ self.assertEqual(seen, all_nodes)
+
+ def test_visit_postorder_tree(self):
+ "postorder visit of a tree satisfies invariant"
+ self.assert_postorder(self.tree.visit_postorder(), self.tree.nodes)
+
+ def test_visit_postorder_diamonds(self):
+ "postorder visit of a graph full of diamonds satisfies invariant"
+ self.assert_postorder(self.diamonds.visit_postorder(), self.diamonds.nodes)
+
+ def test_visit_postorder_multi_edges(self):
+ "postorder visit of a graph with duplicate edges satisfies invariant"
+ self.assert_postorder(self.multi_edges.visit_postorder(), self.multi_edges.nodes)
+
+ def test_visit_postorder_disjoint(self):
+ "postorder visit of a disjoint graph satisfies invariant"
+ self.assert_postorder(self.disjoint.visit_postorder(), self.disjoint.nodes)
+
+ def test_links_dict(self):
+ "link dict for a graph with multiple edges is correct"
+ self.assertEqual(self.multi_edges.links_dict(), {
+ '2': set(['1']),
+ '3': set(['1', '2']),
+ '4': set(['3']),
+ })
+
+ def test_named_links_dict(self):
+ "named link dict for a graph with multiple edges is correct"
+ self.assertEqual(self.multi_edges.named_links_dict(), {
+ '2': dict(red='1', blue='1'),
+ '3': dict(red='1', blue='2', green='2'),
+ '4': dict(green='3'),
+ })
+
+ def test_reverse_links_dict(self):
+ "reverse link dict for a graph with multiple edges is correct"
+ self.assertEqual(self.multi_edges.reverse_links_dict(), {
+ '1': set(['2', '3']),
+ '2': set(['3']),
+ '3': set(['4']),
+ })
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_optimize.py b/taskcluster/taskgraph/test/test_optimize.py
new file mode 100644
index 000000000..8d2ddf247
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_optimize.py
@@ -0,0 +1,256 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..optimize import optimize_task_graph, resolve_task_references
+from ..optimize import annotate_task_graph, get_subgraph
+from ..taskgraph import TaskGraph
+from .. import graph
+from .util import TestTask
+
+
+class TestResolveTaskReferences(unittest.TestCase):
+
+ def do(self, input, output):
+ taskid_for_edge_name = {'edge%d' % n: 'tid%d' % n for n in range(1, 4)}
+ self.assertEqual(resolve_task_references('subject', input, taskid_for_edge_name), output)
+
+ def test_in_list(self):
+ "resolve_task_references resolves task references in a list"
+ self.do({'in-a-list': ['stuff', {'task-reference': '<edge1>'}]},
+ {'in-a-list': ['stuff', 'tid1']})
+
+ def test_in_dict(self):
+ "resolve_task_references resolves task references in a dict"
+ self.do({'in-a-dict': {'stuff': {'task-reference': '<edge2>'}}},
+ {'in-a-dict': {'stuff': 'tid2'}})
+
+ def test_multiple(self):
+ "resolve_task_references resolves multiple references in the same string"
+ self.do({'multiple': {'task-reference': 'stuff <edge1> stuff <edge2> after'}},
+ {'multiple': 'stuff tid1 stuff tid2 after'})
+
+ def test_embedded(self):
+ "resolve_task_references resolves ebmedded references"
+ self.do({'embedded': {'task-reference': 'stuff before <edge3> stuff after'}},
+ {'embedded': 'stuff before tid3 stuff after'})
+
+ def test_escaping(self):
+ "resolve_task_references resolves escapes in task references"
+ self.do({'escape': {'task-reference': '<<><edge3>>'}},
+ {'escape': '<tid3>'})
+
+ def test_invalid(self):
+ "resolve_task_references raises a KeyError on reference to an invalid task"
+ self.assertRaisesRegexp(
+ KeyError,
+ "task 'subject' has no dependency named 'no-such'",
+ lambda: resolve_task_references('subject', {'task-reference': '<no-such>'}, {})
+ )
+
+
+class OptimizingTask(TestTask):
+ # the `optimize` method on this class is overridden directly in the tests
+ # below.
+ pass
+
+
+class TestOptimize(unittest.TestCase):
+
+ kind = None
+
+ def make_task(self, label, task_def=None, optimized=None, task_id=None):
+ task_def = task_def or {'sample': 'task-def'}
+ task = OptimizingTask(label=label, task=task_def)
+ task.optimized = optimized
+ task.task_id = task_id
+ return task
+
+ def make_graph(self, *tasks_and_edges):
+ tasks = {t.label: t for t in tasks_and_edges if isinstance(t, OptimizingTask)}
+ edges = {e for e in tasks_and_edges if not isinstance(e, OptimizingTask)}
+ return TaskGraph(tasks, graph.Graph(set(tasks), edges))
+
+ def assert_annotations(self, graph, **annotations):
+ def repl(task_id):
+ return 'SLUGID' if task_id and len(task_id) == 22 else task_id
+ got_annotations = {
+ t.label: (t.optimized, repl(t.task_id)) for t in graph.tasks.itervalues()
+ }
+ self.assertEqual(got_annotations, annotations)
+
+ def test_annotate_task_graph_no_optimize(self):
+ "annotating marks everything as un-optimized if the kind returns that"
+ OptimizingTask.optimize = lambda self, params: (False, None)
+ graph = self.make_graph(
+ self.make_task('task1'),
+ self.make_task('task2'),
+ self.make_task('task3'),
+ ('task2', 'task1', 'build'),
+ ('task2', 'task3', 'image'),
+ )
+ annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
+ self.assert_annotations(
+ graph,
+ task1=(False, None),
+ task2=(False, None),
+ task3=(False, None)
+ )
+
+ def test_annotate_task_graph_taskid_without_optimize(self):
+ "raises exception if kind returns a taskid without optimizing"
+ OptimizingTask.optimize = lambda self, params: (False, 'some-taskid')
+ graph = self.make_graph(self.make_task('task1'))
+ self.assertRaises(
+ Exception,
+ lambda: annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
+ )
+
+ def test_annotate_task_graph_optimize_away_dependency(self):
+ "raises exception if kind optimizes away a task on which another depends"
+ OptimizingTask.optimize = \
+ lambda self, params: (True, None) if self.label == 'task1' else (False, None)
+ graph = self.make_graph(
+ self.make_task('task1'),
+ self.make_task('task2'),
+ ('task2', 'task1', 'build'),
+ )
+ self.assertRaises(
+ Exception,
+ lambda: annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
+ )
+
+ def test_annotate_task_graph_do_not_optimize(self):
+ "annotating marks everything as un-optimized if in do_not_optimize"
+ OptimizingTask.optimize = lambda self, params: (True, 'taskid')
+ graph = self.make_graph(
+ self.make_task('task1'),
+ self.make_task('task2'),
+ ('task2', 'task1', 'build'),
+ )
+ label_to_taskid = {}
+ annotate_task_graph(graph, {}, {'task1', 'task2'},
+ graph.graph.named_links_dict(), label_to_taskid, None)
+ self.assert_annotations(
+ graph,
+ task1=(False, None),
+ task2=(False, None)
+ )
+
+ def test_annotate_task_graph_nos_do_not_propagate(self):
+ "a task with a non-optimized dependency can be optimized"
+ OptimizingTask.optimize = \
+ lambda self, params: (False, None) if self.label == 'task1' else (True, 'taskid')
+ graph = self.make_graph(
+ self.make_task('task1'),
+ self.make_task('task2'),
+ self.make_task('task3'),
+ ('task2', 'task1', 'build'),
+ ('task2', 'task3', 'image'),
+ )
+ annotate_task_graph(graph, {}, set(),
+ graph.graph.named_links_dict(), {}, None)
+ self.assert_annotations(
+ graph,
+ task1=(False, None),
+ task2=(True, 'taskid'),
+ task3=(True, 'taskid')
+ )
+
+ def test_get_subgraph_single_dep(self):
+ "when a single dependency is optimized, it is omitted from the graph"
+ graph = self.make_graph(
+ self.make_task('task1', optimized=True, task_id='dep1'),
+ self.make_task('task2', optimized=False),
+ self.make_task('task3', optimized=False),
+ ('task2', 'task1', 'build'),
+ ('task2', 'task3', 'image'),
+ )
+ label_to_taskid = {'task1': 'dep1'}
+ sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
+ task2 = label_to_taskid['task2']
+ task3 = label_to_taskid['task3']
+ self.assertEqual(sub.graph.nodes, {task2, task3})
+ self.assertEqual(sub.graph.edges, {(task2, task3, 'image')})
+ self.assertEqual(sub.tasks[task2].task_id, task2)
+ self.assertEqual(sorted(sub.tasks[task2].task['dependencies']),
+ sorted([task3, 'dep1']))
+ self.assertEqual(sub.tasks[task3].task_id, task3)
+ self.assertEqual(sorted(sub.tasks[task3].task['dependencies']), [])
+
+ def test_get_subgraph_dep_chain(self):
+ "when a dependency chain is optimized, it is omitted from the graph"
+ graph = self.make_graph(
+ self.make_task('task1', optimized=True, task_id='dep1'),
+ self.make_task('task2', optimized=True, task_id='dep2'),
+ self.make_task('task3', optimized=False),
+ ('task2', 'task1', 'build'),
+ ('task3', 'task2', 'image'),
+ )
+ label_to_taskid = {'task1': 'dep1', 'task2': 'dep2'}
+ sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
+ task3 = label_to_taskid['task3']
+ self.assertEqual(sub.graph.nodes, {task3})
+ self.assertEqual(sub.graph.edges, set())
+ self.assertEqual(sub.tasks[task3].task_id, task3)
+ self.assertEqual(sorted(sub.tasks[task3].task['dependencies']), ['dep2'])
+
+ def test_get_subgraph_opt_away(self):
+ "when a leaf task is optimized away, it is omitted from the graph"
+ graph = self.make_graph(
+ self.make_task('task1', optimized=False),
+ self.make_task('task2', optimized=True),
+ ('task2', 'task1', 'build'),
+ )
+ label_to_taskid = {'task2': 'dep2'}
+ sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
+ task1 = label_to_taskid['task1']
+ self.assertEqual(sub.graph.nodes, {task1})
+ self.assertEqual(sub.graph.edges, set())
+ self.assertEqual(sub.tasks[task1].task_id, task1)
+ self.assertEqual(sorted(sub.tasks[task1].task['dependencies']), [])
+
+ def test_get_subgraph_refs_resolved(self):
+ "get_subgraph resolves task references"
+ graph = self.make_graph(
+ self.make_task('task1', optimized=True, task_id='dep1'),
+ self.make_task(
+ 'task2',
+ optimized=False,
+ task_def={'payload': {'task-reference': 'http://<build>/<test>'}}
+ ),
+ ('task2', 'task1', 'build'),
+ ('task2', 'task3', 'test'),
+ self.make_task('task3', optimized=False),
+ )
+ label_to_taskid = {'task1': 'dep1'}
+ sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
+ task2 = label_to_taskid['task2']
+ task3 = label_to_taskid['task3']
+ self.assertEqual(sub.graph.nodes, {task2, task3})
+ self.assertEqual(sub.graph.edges, {(task2, task3, 'test')})
+ self.assertEqual(sub.tasks[task2].task_id, task2)
+ self.assertEqual(sorted(sub.tasks[task2].task['dependencies']), sorted([task3, 'dep1']))
+ self.assertEqual(sub.tasks[task2].task['payload'], 'http://dep1/' + task3)
+ self.assertEqual(sub.tasks[task3].task_id, task3)
+
+ def test_optimize(self):
+ "optimize_task_graph annotates and extracts the subgraph from a simple graph"
+ OptimizingTask.optimize = \
+ lambda self, params: (True, 'dep1') if self.label == 'task1' else (False, None)
+ input = self.make_graph(
+ self.make_task('task1'),
+ self.make_task('task2'),
+ self.make_task('task3'),
+ ('task2', 'task1', 'build'),
+ ('task2', 'task3', 'image'),
+ )
+ opt, label_to_taskid = optimize_task_graph(input, {}, set())
+ self.assertEqual(opt.graph, graph.Graph(
+ {label_to_taskid['task2'], label_to_taskid['task3']},
+ {(label_to_taskid['task2'], label_to_taskid['task3'], 'image')}))
diff --git a/taskcluster/taskgraph/test/test_parameters.py b/taskcluster/taskgraph/test/test_parameters.py
new file mode 100644
index 000000000..43d853d7b
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_parameters.py
@@ -0,0 +1,62 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..parameters import Parameters, load_parameters_file, PARAMETER_NAMES
+from mozunit import main, MockedOpen
+
+
+class TestParameters(unittest.TestCase):
+
+ vals = {n: n for n in PARAMETER_NAMES}
+
+ def test_Parameters_immutable(self):
+ p = Parameters(**self.vals)
+
+ def assign():
+ p['head_ref'] = 20
+ self.assertRaises(Exception, assign)
+
+ def test_Parameters_missing_KeyError(self):
+ p = Parameters(**self.vals)
+ self.assertRaises(KeyError, lambda: p['z'])
+
+ def test_Parameters_invalid_KeyError(self):
+ """even if the value is present, if it's not a valid property, raise KeyError"""
+ p = Parameters(xyz=10, **self.vals)
+ self.assertRaises(KeyError, lambda: p['xyz'])
+
+ def test_Parameters_get(self):
+ p = Parameters(head_ref=10, level=20)
+ self.assertEqual(p['head_ref'], 10)
+
+ def test_Parameters_check(self):
+ p = Parameters(**self.vals)
+ p.check() # should not raise
+
+ def test_Parameters_check_missing(self):
+ p = Parameters()
+ self.assertRaises(Exception, lambda: p.check())
+
+ def test_Parameters_check_extra(self):
+ p = Parameters(xyz=10, **self.vals)
+ self.assertRaises(Exception, lambda: p.check())
+
+ def test_load_parameters_file_yaml(self):
+ with MockedOpen({"params.yml": "some: data\n"}):
+ self.assertEqual(
+ load_parameters_file({'parameters': 'params.yml'}),
+ {'some': 'data'})
+
+ def test_load_parameters_file_json(self):
+ with MockedOpen({"params.json": '{"some": "data"}'}):
+ self.assertEqual(
+ load_parameters_file({'parameters': 'params.json'}),
+ {'some': 'data'})
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_target_tasks.py b/taskcluster/taskgraph/test/test_target_tasks.py
new file mode 100644
index 000000000..035ccefd8
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_target_tasks.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from .. import target_tasks
+from .. import try_option_syntax
+from ..graph import Graph
+from ..taskgraph import TaskGraph
+from .util import TestTask
+from mozunit import main
+
+
+class FakeTryOptionSyntax(object):
+
+ def __init__(self, message, task_graph):
+ self.trigger_tests = 0
+ self.notifications = None
+
+ def task_matches(self, attributes):
+ return 'at-at' in attributes
+
+
+class TestTargetTasks(unittest.TestCase):
+
+ def default_matches(self, run_on_projects, project):
+ method = target_tasks.get_method('default')
+ graph = TaskGraph(tasks={
+ 'a': TestTask(kind='build', label='a',
+ attributes={'run_on_projects': run_on_projects}),
+ }, graph=Graph(nodes={'a'}, edges=set()))
+ parameters = {'project': project}
+ return 'a' in method(graph, parameters)
+
+ def test_default_all(self):
+ """run_on_projects=[all] includes release, integration, and other projects"""
+ self.assertTrue(self.default_matches(['all'], 'mozilla-central'))
+ self.assertTrue(self.default_matches(['all'], 'mozilla-inbound'))
+ self.assertTrue(self.default_matches(['all'], 'mozilla-aurora'))
+ self.assertTrue(self.default_matches(['all'], 'baobab'))
+
+ def test_default_integration(self):
+ """run_on_projects=[integration] includes integration projects"""
+ self.assertFalse(self.default_matches(['integration'], 'mozilla-central'))
+ self.assertTrue(self.default_matches(['integration'], 'mozilla-inbound'))
+ self.assertFalse(self.default_matches(['integration'], 'baobab'))
+
+ def test_default_release(self):
+ """run_on_projects=[release] includes release projects"""
+ self.assertTrue(self.default_matches(['release'], 'mozilla-central'))
+ self.assertFalse(self.default_matches(['release'], 'mozilla-inbound'))
+ self.assertFalse(self.default_matches(['release'], 'baobab'))
+
+ def test_default_nothing(self):
+ """run_on_projects=[] includes nothing"""
+ self.assertFalse(self.default_matches([], 'mozilla-central'))
+ self.assertFalse(self.default_matches([], 'mozilla-inbound'))
+ self.assertFalse(self.default_matches([], 'baobab'))
+
+ def test_try_option_syntax(self):
+ tasks = {
+ 'a': TestTask(kind=None, label='a'),
+ 'b': TestTask(kind=None, label='b', attributes={'at-at': 'yep'}),
+ }
+ graph = Graph(nodes=set('ab'), edges=set())
+ tg = TaskGraph(tasks, graph)
+ params = {'message': 'try me'}
+
+ orig_TryOptionSyntax = try_option_syntax.TryOptionSyntax
+ try:
+ try_option_syntax.TryOptionSyntax = FakeTryOptionSyntax
+ method = target_tasks.get_method('try_option_syntax')
+ self.assertEqual(method(tg, params), ['b'])
+ finally:
+ try_option_syntax.TryOptionSyntax = orig_TryOptionSyntax
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_task_docker_image.py b/taskcluster/taskgraph/test/test_task_docker_image.py
new file mode 100644
index 000000000..8f247db3e
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_task_docker_image.py
@@ -0,0 +1,35 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+import os
+
+from ..task import docker_image
+from mozunit import main
+
+
+KIND_PATH = os.path.join(docker_image.GECKO, 'taskcluster', 'ci', 'docker-image')
+
+
+class TestDockerImageKind(unittest.TestCase):
+
+ def setUp(self):
+ self.task = docker_image.DockerImageTask(
+ 'docker-image',
+ KIND_PATH,
+ {},
+ {},
+ index_paths=[])
+
+ def test_get_task_dependencies(self):
+ # this one's easy!
+ self.assertEqual(self.task.get_dependencies(None), [])
+
+ # TODO: optimize_task
+
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_taskgraph.py b/taskcluster/taskgraph/test/test_taskgraph.py
new file mode 100644
index 000000000..f8f09bce9
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_taskgraph.py
@@ -0,0 +1,54 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..graph import Graph
+from ..task.docker_image import DockerImageTask
+from ..task.transform import TransformTask
+from ..taskgraph import TaskGraph
+from mozunit import main
+from taskgraph.util.docker import INDEX_PREFIX
+
+
+class TestTaskGraph(unittest.TestCase):
+
+ def test_from_json(self):
+ task = {
+ "routes": [],
+ "extra": {
+ "imageMeta": {
+ "contextHash": "<hash>",
+ "imageName": "<image>",
+ "level": "1"
+ }
+ }
+ }
+ index_paths = ["{}.level-{}.<image>.hash.<hash>".format(INDEX_PREFIX, level)
+ for level in range(1, 4)]
+ graph = TaskGraph(tasks={
+ 'a': TransformTask(
+ kind='fancy',
+ task={
+ 'label': 'a',
+ 'attributes': {},
+ 'dependencies': {},
+ 'when': {},
+ 'task': {'task': 'def'},
+ }),
+ 'b': DockerImageTask(kind='docker-image',
+ label='b',
+ attributes={},
+ task=task,
+ index_paths=index_paths),
+ }, graph=Graph(nodes={'a', 'b'}, edges=set()))
+
+ tasks, new_graph = TaskGraph.from_json(graph.to_json())
+ self.assertEqual(graph.tasks['a'], new_graph.tasks['a'])
+ self.assertEqual(graph, new_graph)
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_transforms_base.py b/taskcluster/taskgraph/test/test_transforms_base.py
new file mode 100644
index 000000000..0a0dfcaf2
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_transforms_base.py
@@ -0,0 +1,143 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+from mozunit import main
+from taskgraph.transforms.base import (
+ validate_schema,
+ get_keyed_by,
+ TransformSequence
+)
+from voluptuous import Schema
+
+schema = Schema({
+ 'x': int,
+ 'y': basestring,
+})
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def trans1(config, tests):
+ for test in tests:
+ test['one'] = 1
+ yield test
+
+
+@transforms.add
+def trans2(config, tests):
+ for test in tests:
+ test['two'] = 2
+ yield test
+
+
+class TestTransformSequence(unittest.TestCase):
+
+ def test_sequence(self):
+ tests = [{}, {'two': 1, 'second': True}]
+ res = list(transforms({}, tests))
+ self.assertEqual(res, [
+ {u'two': 2, u'one': 1},
+ {u'second': True, u'two': 2, u'one': 1},
+ ])
+
+
+class TestValidateSchema(unittest.TestCase):
+
+ def test_valid(self):
+ validate_schema(schema, {'x': 10, 'y': 'foo'}, "pfx")
+
+ def test_invalid(self):
+ try:
+ validate_schema(schema, {'x': 'not-int'}, "pfx")
+ self.fail("no exception raised")
+ except Exception as e:
+ self.assertTrue(str(e).startswith("pfx\n"))
+
+
+class TestKeyedBy(unittest.TestCase):
+
+ def test_simple_value(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': 10,
+ }
+ self.assertEqual(get_keyed_by(test, 'option', 'x'), 10)
+
+ def test_by_value(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'by-other-value': {
+ 'a': 10,
+ 'b': 20,
+ },
+ },
+ 'other-value': 'b',
+ }
+ self.assertEqual(get_keyed_by(test, 'option', 'x'), 20)
+
+ def test_by_value_regex(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'by-test-platform': {
+ 'macosx64/.*': 10,
+ 'linux64/debug': 20,
+ 'default': 5,
+ },
+ },
+ 'test-platform': 'macosx64/debug',
+ }
+ self.assertEqual(get_keyed_by(test, 'option', 'x'), 10)
+
+ def test_by_value_default(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'by-other-value': {
+ 'a': 10,
+ 'default': 30,
+ },
+ },
+ 'other-value': 'xxx',
+ }
+ self.assertEqual(get_keyed_by(test, 'option', 'x'), 30)
+
+ def test_by_value_invalid_dict(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'by-something-else': {},
+ 'by-other-value': {},
+ },
+ }
+ self.assertRaises(Exception, get_keyed_by, test, 'option', 'x')
+
+ def test_by_value_invalid_no_default(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'by-other-value': {
+ 'a': 10,
+ },
+ },
+ 'other-value': 'b',
+ }
+ self.assertRaises(Exception, get_keyed_by, test, 'option', 'x')
+
+ def test_by_value_invalid_no_by(self):
+ test = {
+ 'test-name': 'tname',
+ 'option': {
+ 'other-value': {},
+ },
+ }
+ self.assertRaises(Exception, get_keyed_by, test, 'option', 'x')
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_try_option_syntax.py b/taskcluster/taskgraph/test/test_try_option_syntax.py
new file mode 100644
index 000000000..29aa2d5a9
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_try_option_syntax.py
@@ -0,0 +1,274 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+import itertools
+
+from ..try_option_syntax import TryOptionSyntax
+from ..try_option_syntax import RIDEALONG_BUILDS
+from ..graph import Graph
+from ..taskgraph import TaskGraph
+from .util import TestTask
+from mozunit import main
+
+# an empty graph, for things that don't look at it
+empty_graph = TaskGraph({}, Graph(set(), set()))
+
+
+def unittest_task(n, tp):
+ return (n, TestTask('test', n, {
+ 'unittest_try_name': n,
+ 'test_platform': tp,
+ }))
+
+
+def talos_task(n, tp):
+ return (n, TestTask('test', n, {
+ 'talos_try_name': n,
+ 'test_platform': tp,
+ }))
+
+tasks = {k: v for k, v in [
+ unittest_task('mochitest-browser-chrome', 'linux'),
+ unittest_task('mochitest-e10s-browser-chrome', 'linux64'),
+ unittest_task('mochitest-chrome', 'linux'),
+ unittest_task('mochitest-webgl', 'linux'),
+ unittest_task('crashtest-e10s', 'linux'),
+ unittest_task('gtest', 'linux64'),
+ talos_task('dromaeojs', 'linux64'),
+]}
+unittest_tasks = {k: v for k, v in tasks.iteritems()
+ if 'unittest_try_name' in v.attributes}
+talos_tasks = {k: v for k, v in tasks.iteritems()
+ if 'talos_try_name' in v.attributes}
+graph_with_jobs = TaskGraph(tasks, Graph(set(tasks), set()))
+
+
+class TestTryOptionSyntax(unittest.TestCase):
+
+ def test_empty_message(self):
+ "Given an empty message, it should return an empty value"
+ tos = TryOptionSyntax('', empty_graph)
+ self.assertEqual(tos.build_types, [])
+ self.assertEqual(tos.jobs, [])
+ self.assertEqual(tos.unittests, [])
+ self.assertEqual(tos.talos, [])
+ self.assertEqual(tos.platforms, [])
+
+ def test_message_without_try(self):
+ "Given a non-try message, it should return an empty value"
+ tos = TryOptionSyntax('Bug 1234: frobnicate the foo', empty_graph)
+ self.assertEqual(tos.build_types, [])
+ self.assertEqual(tos.jobs, [])
+ self.assertEqual(tos.unittests, [])
+ self.assertEqual(tos.talos, [])
+ self.assertEqual(tos.platforms, [])
+
+ def test_unknown_args(self):
+ "unknown arguments are ignored"
+ tos = TryOptionSyntax('try: --doubledash -z extra', empty_graph)
+ # equivalent to "try:"
+ self.assertEqual(tos.build_types, [])
+ self.assertEqual(tos.jobs, None)
+
+ def test_b_do(self):
+ "-b do should produce both build_types"
+ tos = TryOptionSyntax('try: -b do', empty_graph)
+ self.assertEqual(sorted(tos.build_types), ['debug', 'opt'])
+
+ def test_b_d(self):
+ "-b d should produce build_types=['debug']"
+ tos = TryOptionSyntax('try: -b d', empty_graph)
+ self.assertEqual(sorted(tos.build_types), ['debug'])
+
+ def test_b_o(self):
+ "-b o should produce build_types=['opt']"
+ tos = TryOptionSyntax('try: -b o', empty_graph)
+ self.assertEqual(sorted(tos.build_types), ['opt'])
+
+ def test_build_o(self):
+ "--build o should produce build_types=['opt']"
+ tos = TryOptionSyntax('try: --build o', empty_graph)
+ self.assertEqual(sorted(tos.build_types), ['opt'])
+
+ def test_b_dx(self):
+ "-b dx should produce build_types=['debug'], silently ignoring the x"
+ tos = TryOptionSyntax('try: -b dx', empty_graph)
+ self.assertEqual(sorted(tos.build_types), ['debug'])
+
+ def test_j_job(self):
+ "-j somejob sets jobs=['somejob']"
+ tos = TryOptionSyntax('try: -j somejob', empty_graph)
+ self.assertEqual(sorted(tos.jobs), ['somejob'])
+
+ def test_j_jobs(self):
+ "-j job1,job2 sets jobs=['job1', 'job2']"
+ tos = TryOptionSyntax('try: -j job1,job2', empty_graph)
+ self.assertEqual(sorted(tos.jobs), ['job1', 'job2'])
+
+ def test_j_all(self):
+ "-j all sets jobs=None"
+ tos = TryOptionSyntax('try: -j all', empty_graph)
+ self.assertEqual(tos.jobs, None)
+
+ def test_j_twice(self):
+ "-j job1 -j job2 sets jobs=job1, job2"
+ tos = TryOptionSyntax('try: -j job1 -j job2', empty_graph)
+ self.assertEqual(sorted(tos.jobs), sorted(['job1', 'job2']))
+
+ def test_p_all(self):
+ "-p all sets platforms=None"
+ tos = TryOptionSyntax('try: -p all', empty_graph)
+ self.assertEqual(tos.platforms, None)
+
+ def test_p_linux(self):
+ "-p linux sets platforms=['linux', 'linux-l10n']"
+ tos = TryOptionSyntax('try: -p linux', empty_graph)
+ self.assertEqual(tos.platforms, ['linux', 'linux-l10n'])
+
+ def test_p_linux_win32(self):
+ "-p linux,win32 sets platforms=['linux', 'linux-l10n', 'win32']"
+ tos = TryOptionSyntax('try: -p linux,win32', empty_graph)
+ self.assertEqual(sorted(tos.platforms), ['linux', 'linux-l10n', 'win32'])
+
+ def test_p_expands_ridealongs(self):
+ "-p linux,linux64 includes the RIDEALONG_BUILDS"
+ tos = TryOptionSyntax('try: -p linux,linux64', empty_graph)
+ ridealongs = list(task
+ for task in itertools.chain.from_iterable(
+ RIDEALONG_BUILDS.itervalues()
+ )
+ if 'android' not in task) # Don't include android-l10n
+ self.assertEqual(sorted(tos.platforms), sorted(['linux', 'linux64'] + ridealongs))
+
+ def test_u_none(self):
+ "-u none sets unittests=[]"
+ tos = TryOptionSyntax('try: -u none', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), [])
+
+ def test_u_all(self):
+ "-u all sets unittests=[..whole list..]"
+ tos = TryOptionSyntax('try: -u all', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([{'test': t} for t in unittest_tasks]))
+
+ def test_u_single(self):
+ "-u mochitest-webgl sets unittests=[mochitest-webgl]"
+ tos = TryOptionSyntax('try: -u mochitest-webgl', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([{'test': 'mochitest-webgl'}]))
+
+ def test_u_alias(self):
+ "-u mochitest-gl sets unittests=[mochitest-webgl]"
+ tos = TryOptionSyntax('try: -u mochitest-gl', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([{'test': 'mochitest-webgl'}]))
+
+ def test_u_multi_alias(self):
+ "-u e10s sets unittests=[all e10s unittests]"
+ tos = TryOptionSyntax('try: -u e10s', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': t} for t in unittest_tasks if 'e10s' in t
+ ]))
+
+ def test_u_commas(self):
+ "-u mochitest-webgl,gtest sets unittests=both"
+ tos = TryOptionSyntax('try: -u mochitest-webgl,gtest', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'mochitest-webgl'},
+ {'test': 'gtest'},
+ ]))
+
+ def test_u_chunks(self):
+ "-u gtest-3,gtest-4 selects the third and fourth chunk of gtest"
+ tos = TryOptionSyntax('try: -u gtest-3,gtest-4', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'only_chunks': set('34')},
+ ]))
+
+ def test_u_platform(self):
+ "-u gtest[linux] selects the linux platform for gtest"
+ tos = TryOptionSyntax('try: -u gtest[linux]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux']},
+ ]))
+
+ def test_u_platforms(self):
+ "-u gtest[linux,win32] selects the linux and win32 platforms for gtest"
+ tos = TryOptionSyntax('try: -u gtest[linux,win32]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux', 'win32']},
+ ]))
+
+ def test_u_platforms_pretty(self):
+ "-u gtest[Ubuntu] selects the linux, linux64 and linux64-asan platforms for gtest"
+ tos = TryOptionSyntax('try: -u gtest[Ubuntu]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux', 'linux64', 'linux64-asan']},
+ ]))
+
+ def test_u_platforms_negated(self):
+ "-u gtest[-linux] selects all platforms but linux for gtest"
+ tos = TryOptionSyntax('try: -u gtest[-linux]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux64']},
+ ]))
+
+ def test_u_platforms_negated_pretty(self):
+ "-u gtest[Ubuntu,-x64] selects just linux for gtest"
+ tos = TryOptionSyntax('try: -u gtest[Ubuntu,-x64]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux']},
+ ]))
+
+ def test_u_chunks_platforms(self):
+ "-u gtest-1[linux,win32] selects the linux and win32 platforms for chunk 1 of gtest"
+ tos = TryOptionSyntax('try: -u gtest-1[linux,win32]', graph_with_jobs)
+ self.assertEqual(sorted(tos.unittests), sorted([
+ {'test': 'gtest', 'platforms': ['linux', 'win32'], 'only_chunks': set('1')},
+ ]))
+
+ def test_t_none(self):
+ "-t none sets talos=[]"
+ tos = TryOptionSyntax('try: -t none', graph_with_jobs)
+ self.assertEqual(sorted(tos.talos), [])
+
+ def test_t_all(self):
+ "-t all sets talos=[..whole list..]"
+ tos = TryOptionSyntax('try: -t all', graph_with_jobs)
+ self.assertEqual(sorted(tos.talos), sorted([{'test': t} for t in talos_tasks]))
+
+ def test_t_single(self):
+ "-t mochitest-webgl sets talos=[mochitest-webgl]"
+ tos = TryOptionSyntax('try: -t mochitest-webgl', graph_with_jobs)
+ self.assertEqual(sorted(tos.talos), sorted([{'test': 'mochitest-webgl'}]))
+
+ # -t shares an implementation with -u, so it's not tested heavily
+
+ def test_trigger_tests(self):
+ "--rebuild 10 sets trigger_tests"
+ tos = TryOptionSyntax('try: --rebuild 10', empty_graph)
+ self.assertEqual(tos.trigger_tests, 10)
+
+ def test_interactive(self):
+ "--interactive sets interactive"
+ tos = TryOptionSyntax('try: --interactive', empty_graph)
+ self.assertEqual(tos.interactive, True)
+
+ def test_all_email(self):
+ "--all-emails sets notifications"
+ tos = TryOptionSyntax('try: --all-emails', empty_graph)
+ self.assertEqual(tos.notifications, 'all')
+
+ def test_fail_email(self):
+ "--failure-emails sets notifications"
+ tos = TryOptionSyntax('try: --failure-emails', empty_graph)
+ self.assertEqual(tos.notifications, 'failure')
+
+ def test_no_email(self):
+ "no email settings don't set notifications"
+ tos = TryOptionSyntax('try:', empty_graph)
+ self.assertEqual(tos.notifications, None)
+
+if __name__ == '__main__':
+ main()
diff --git a/taskcluster/taskgraph/test/test_util_attributes.py b/taskcluster/taskgraph/test/test_util_attributes.py
new file mode 100644
index 000000000..c3575e917
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_attributes.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+from taskgraph.util.attributes import attrmatch
+
+
+class Attrmatch(unittest.TestCase):
+
+ def test_trivial_match(self):
+ """Given no conditions, anything matches"""
+ self.assertTrue(attrmatch({}))
+
+ def test_missing_attribute(self):
+ """If a filtering attribute is not present, no match"""
+ self.assertFalse(attrmatch({}, someattr=10))
+
+ def test_literal_attribute(self):
+ """Literal attributes must match exactly"""
+ self.assertTrue(attrmatch({'att': 10}, att=10))
+ self.assertFalse(attrmatch({'att': 10}, att=20))
+
+ def test_set_attribute(self):
+ """Set attributes require set membership"""
+ self.assertTrue(attrmatch({'att': 10}, att=set([9, 10])))
+ self.assertFalse(attrmatch({'att': 10}, att=set([19, 20])))
+
+ def test_callable_attribute(self):
+ """Callable attributes are called and any False causes the match to fail"""
+ self.assertTrue(attrmatch({'att': 10}, att=lambda val: True))
+ self.assertFalse(attrmatch({'att': 10}, att=lambda val: False))
+
+ def even(val):
+ return val % 2 == 0
+ self.assertTrue(attrmatch({'att': 10}, att=even))
+ self.assertFalse(attrmatch({'att': 11}, att=even))
+
+ def test_all_matches_required(self):
+ """If only one attribute does not match, the result is False"""
+ self.assertFalse(attrmatch({'a': 1}, a=1, b=2, c=3))
+ self.assertFalse(attrmatch({'a': 1, 'b': 2}, a=1, b=2, c=3))
+ self.assertTrue(attrmatch({'a': 1, 'b': 2, 'c': 3}, a=1, b=2, c=3))
diff --git a/taskcluster/taskgraph/test/test_util_docker.py b/taskcluster/taskgraph/test/test_util_docker.py
new file mode 100644
index 000000000..3bb7fe8f0
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_docker.py
@@ -0,0 +1,194 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import shutil
+import stat
+import tarfile
+import tempfile
+import unittest
+
+from ..util import docker
+from mozunit import MockedOpen
+
+
+MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
+
+
+class TestDocker(unittest.TestCase):
+
+ def test_generate_context_hash(self):
+ tmpdir = tempfile.mkdtemp()
+ old_GECKO = docker.GECKO
+ docker.GECKO = tmpdir
+ try:
+ os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
+ p = os.path.join(tmpdir, 'docker', 'my-image', 'Dockerfile')
+ with open(p, 'w') as f:
+ f.write("FROM node\nADD a-file\n")
+ os.chmod(p, MODE_STANDARD)
+ p = os.path.join(tmpdir, 'docker', 'my-image', 'a-file')
+ with open(p, 'w') as f:
+ f.write("data\n")
+ os.chmod(p, MODE_STANDARD)
+ self.assertEqual(
+ docker.generate_context_hash(docker.GECKO,
+ os.path.join(docker.GECKO, 'docker/my-image'),
+ 'my-image'),
+ 'e61e675ce05e8c11424437db3f1004079374c1a5fe6ad6800346cebe137b0797'
+ )
+ finally:
+ docker.GECKO = old_GECKO
+ shutil.rmtree(tmpdir)
+
+ def test_docker_image_explicit_registry(self):
+ files = {}
+ files["{}/myimage/REGISTRY".format(docker.DOCKER_ROOT)] = "cool-images"
+ files["{}/myimage/VERSION".format(docker.DOCKER_ROOT)] = "1.2.3"
+ with MockedOpen(files):
+ self.assertEqual(docker.docker_image('myimage'), "cool-images/myimage:1.2.3")
+
+ def test_docker_image_default_registry(self):
+ files = {}
+ files["{}/REGISTRY".format(docker.DOCKER_ROOT)] = "mozilla"
+ files["{}/myimage/VERSION".format(docker.DOCKER_ROOT)] = "1.2.3"
+ with MockedOpen(files):
+ self.assertEqual(docker.docker_image('myimage'), "mozilla/myimage:1.2.3")
+
+ def test_create_context_tar_basic(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test_image')
+ os.mkdir(d)
+ with open(os.path.join(d, 'Dockerfile'), 'a'):
+ pass
+ os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
+
+ with open(os.path.join(d, 'extra'), 'a'):
+ pass
+ os.chmod(os.path.join(d, 'extra'), MODE_STANDARD)
+
+ tp = os.path.join(tmp, 'tar')
+ h = docker.create_context_tar(tmp, d, tp, 'my_image')
+ self.assertEqual(h, '2a6d7f1627eba60daf85402418e041d728827d309143c6bc1c6bb3035bde6717')
+
+ # File prefix should be "my_image"
+ with tarfile.open(tp, 'r:gz') as tf:
+ self.assertEqual(tf.getnames(), [
+ 'my_image/Dockerfile',
+ 'my_image/extra',
+ ])
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_topsrcdir_files(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test-image')
+ os.mkdir(d)
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
+ fh.write(b'# %include extra/file0\n')
+ os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
+
+ extra = os.path.join(tmp, 'extra')
+ os.mkdir(extra)
+ with open(os.path.join(extra, 'file0'), 'a'):
+ pass
+ os.chmod(os.path.join(extra, 'file0'), MODE_STANDARD)
+
+ tp = os.path.join(tmp, 'tar')
+ h = docker.create_context_tar(tmp, d, tp, 'test_image')
+ self.assertEqual(h, '20faeb7c134f21187b142b5fadba94ae58865dc929c6c293d8cbc0a087269338')
+
+ with tarfile.open(tp, 'r:gz') as tf:
+ self.assertEqual(tf.getnames(), [
+ 'test_image/Dockerfile',
+ 'test_image/topsrcdir/extra/file0',
+ ])
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_absolute_path(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test-image')
+ os.mkdir(d)
+
+ # Absolute paths in %include syntax are not allowed.
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
+ fh.write(b'# %include /etc/shadow\n')
+
+ with self.assertRaisesRegexp(Exception, 'cannot be absolute'):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_outside_topsrcdir(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test-image')
+ os.mkdir(d)
+
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
+ fh.write(b'# %include foo/../../../etc/shadow\n')
+
+ with self.assertRaisesRegexp(Exception, 'path outside topsrcdir'):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_missing_extra(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test-image')
+ os.mkdir(d)
+
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
+ fh.write(b'# %include does/not/exist\n')
+
+ with self.assertRaisesRegexp(Exception, 'path does not exist'):
+ docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
+ finally:
+ shutil.rmtree(tmp)
+
+ def test_create_context_extra_directory(self):
+ tmp = tempfile.mkdtemp()
+ try:
+ d = os.path.join(tmp, 'test-image')
+ os.mkdir(d)
+
+ with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
+ fh.write(b'# %include extra\n')
+ fh.write(b'# %include file0\n')
+ os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
+
+ extra = os.path.join(tmp, 'extra')
+ os.mkdir(extra)
+ for i in range(3):
+ p = os.path.join(extra, 'file%d' % i)
+ with open(p, 'wb') as fh:
+ fh.write(b'file%d' % i)
+ os.chmod(p, MODE_STANDARD)
+
+ with open(os.path.join(tmp, 'file0'), 'a'):
+ pass
+ os.chmod(os.path.join(tmp, 'file0'), MODE_STANDARD)
+
+ tp = os.path.join(tmp, 'tar')
+ h = docker.create_context_tar(tmp, d, tp, 'my_image')
+
+ self.assertEqual(h, 'e5440513ab46ae4c1d056269e1c6715d5da7d4bd673719d360411e35e5b87205')
+
+ with tarfile.open(tp, 'r:gz') as tf:
+ self.assertEqual(tf.getnames(), [
+ 'my_image/Dockerfile',
+ 'my_image/topsrcdir/extra/file0',
+ 'my_image/topsrcdir/extra/file1',
+ 'my_image/topsrcdir/extra/file2',
+ 'my_image/topsrcdir/file0',
+ ])
+ finally:
+ shutil.rmtree(tmp)
diff --git a/taskcluster/taskgraph/test/test_util_python_path.py b/taskcluster/taskgraph/test/test_util_python_path.py
new file mode 100644
index 000000000..9615d1347
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_python_path.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+from ..util import python_path
+
+
+class TestObject(object):
+
+ testClassProperty = object()
+
+
+class TestPythonPath(unittest.TestCase):
+
+ def test_find_object_no_such_module(self):
+ """find_object raises ImportError for a nonexistent module"""
+ self.assertRaises(ImportError, python_path.find_object, "no_such_module:someobj")
+
+ def test_find_object_no_such_object(self):
+ """find_object raises AttributeError for a nonexistent object"""
+ self.assertRaises(AttributeError, python_path.find_object,
+ "taskgraph.test.test_util_python_path:NoSuchObject")
+
+ def test_find_object_exists(self):
+ """find_object finds an existing object"""
+ obj = python_path.find_object(
+ "taskgraph.test.test_util_python_path:TestObject.testClassProperty")
+ self.assertIs(obj, TestObject.testClassProperty)
diff --git a/taskcluster/taskgraph/test/test_util_templates.py b/taskcluster/taskgraph/test/test_util_templates.py
new file mode 100755
index 000000000..47f7494a0
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_templates.py
@@ -0,0 +1,232 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+import mozunit
+import textwrap
+from taskgraph.util.templates import (
+ merge_to,
+ merge,
+ Templates,
+ TemplatesException
+)
+
+files = {}
+files['/fixtures/circular.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: 'circular_ref.yml'
+ variables:
+ woot: 'inherit'
+ """)
+
+files['/fixtures/inherit.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: 'templates.yml'
+ variables:
+ woot: 'inherit'
+ """)
+
+files['/fixtures/extend_child.yml'] = textwrap.dedent("""\
+ list: ['1', '2', '3']
+ was_list: ['1']
+ obj:
+ level: 1
+ deeper:
+ woot: 'bar'
+ list: ['baz']
+ """)
+
+files['/fixtures/circular_ref.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: 'circular.yml'
+ """)
+
+files['/fixtures/child_pass.yml'] = textwrap.dedent("""\
+ values:
+ - {{a}}
+ - {{b}}
+ - {{c}}
+ """)
+
+files['/fixtures/inherit_pass.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: 'child_pass.yml'
+ variables:
+ a: 'a'
+ b: 'b'
+ c: 'c'
+ """)
+
+files['/fixtures/deep/2.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: deep/1.yml
+
+ """)
+
+files['/fixtures/deep/3.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: deep/2.yml
+
+ """)
+
+files['/fixtures/deep/4.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: deep/3.yml
+ """)
+
+files['/fixtures/deep/1.yml'] = textwrap.dedent("""\
+ variable: {{value}}
+ """)
+
+files['/fixtures/simple.yml'] = textwrap.dedent("""\
+ is_simple: true
+ """)
+
+files['/fixtures/templates.yml'] = textwrap.dedent("""\
+ content: 'content'
+ variable: '{{woot}}'
+ """)
+
+files['/fixtures/extend_parent.yml'] = textwrap.dedent("""\
+ $inherits:
+ from: 'extend_child.yml'
+
+ list: ['4']
+ was_list:
+ replaced: true
+ obj:
+ level: 2
+ from_parent: true
+ deeper:
+ list: ['bar']
+ """)
+
+
+class TemplatesTest(unittest.TestCase):
+
+ def setUp(self):
+ self.mocked_open = mozunit.MockedOpen(files)
+ self.mocked_open.__enter__()
+ self.subject = Templates('/fixtures')
+
+ def tearDown(self):
+ self.mocked_open.__exit__(None, None, None)
+
+ def test_invalid_path(self):
+ with self.assertRaisesRegexp(TemplatesException, 'must be a directory'):
+ Templates('/zomg/not/a/dir')
+
+ def test_no_templates(self):
+ content = self.subject.load('simple.yml', {})
+ self.assertEqual(content, {
+ 'is_simple': True
+ })
+
+ def test_with_templates(self):
+ content = self.subject.load('templates.yml', {
+ 'woot': 'bar'
+ })
+
+ self.assertEqual(content, {
+ 'content': 'content',
+ 'variable': 'bar'
+ })
+
+ def test_inheritance(self):
+ '''
+ The simple single pass inheritance case.
+ '''
+ content = self.subject.load('inherit.yml', {})
+ self.assertEqual(content, {
+ 'content': 'content',
+ 'variable': 'inherit'
+ })
+
+ def test_inheritance_implicit_pass(self):
+ '''
+ Implicitly pass parameters from the child to the ancestor.
+ '''
+ content = self.subject.load('inherit_pass.yml', {
+ 'a': 'overridden'
+ })
+
+ self.assertEqual(content, {'values': ['overridden', 'b', 'c']})
+
+ def test_inheritance_circular(self):
+ '''
+ Circular reference handling.
+ '''
+ with self.assertRaisesRegexp(TemplatesException, 'circular'):
+ self.subject.load('circular.yml', {})
+
+ def test_deep_inheritance(self):
+ content = self.subject.load('deep/4.yml', {
+ 'value': 'myvalue'
+ })
+ self.assertEqual(content, {'variable': 'myvalue'})
+
+ def test_inheritance_with_simple_extensions(self):
+ content = self.subject.load('extend_parent.yml', {})
+ self.assertEqual(content, {
+ 'list': ['1', '2', '3', '4'],
+ 'obj': {
+ 'from_parent': True,
+ 'deeper': {
+ 'woot': 'bar',
+ 'list': ['baz', 'bar']
+ },
+ 'level': 2,
+ },
+ 'was_list': {'replaced': True}
+ })
+
+
+class MergeTest(unittest.TestCase):
+
+ def test_merge_to_dicts(self):
+ source = {'a': 1, 'b': 2}
+ dest = {'b': '20', 'c': 30}
+ expected = {
+ 'a': 1, # source only
+ 'b': 2, # source overrides dest
+ 'c': 30, # dest only
+ }
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge_to_lists(self):
+ source = {'x': [3, 4]}
+ dest = {'x': [1, 2]}
+ expected = {'x': [1, 2, 3, 4]} # dest first
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge_diff_types(self):
+ source = {'x': [1, 2]}
+ dest = {'x': 'abc'}
+ expected = {'x': [1, 2]} # source wins
+ self.assertEqual(merge_to(source, dest), expected)
+ self.assertEqual(dest, expected)
+
+ def test_merge(self):
+ first = {'a': 1, 'b': 2, 'd': 11}
+ second = {'b': 20, 'c': 30}
+ third = {'c': 300, 'd': 400}
+ expected = {
+ 'a': 1,
+ 'b': 20,
+ 'c': 300,
+ 'd': 400,
+ }
+ self.assertEqual(merge(first, second, third), expected)
+
+ # inputs haven't changed..
+ self.assertEqual(first, {'a': 1, 'b': 2, 'd': 11})
+ self.assertEqual(second, {'b': 20, 'c': 30})
+ self.assertEqual(third, {'c': 300, 'd': 400})
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/taskcluster/taskgraph/test/test_util_time.py b/taskcluster/taskgraph/test/test_util_time.py
new file mode 100755
index 000000000..f001c9d9c
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_time.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import unittest
+import mozunit
+from datetime import datetime
+from taskgraph.util.time import (
+ InvalidString,
+ UnknownTimeMeasurement,
+ value_of,
+ json_time_from_now
+)
+
+
+class FromNowTest(unittest.TestCase):
+
+ def test_invalid_str(self):
+ with self.assertRaises(InvalidString):
+ value_of('wtfs')
+
+ def test_missing_unit(self):
+ with self.assertRaises(InvalidString):
+ value_of('1')
+
+ def test_unknown_unit(self):
+ with self.assertRaises(UnknownTimeMeasurement):
+ value_of('1z')
+
+ def test_value_of(self):
+ self.assertEqual(value_of('1s').total_seconds(), 1)
+ self.assertEqual(value_of('1 second').total_seconds(), 1)
+ self.assertEqual(value_of('1min').total_seconds(), 60)
+ self.assertEqual(value_of('1h').total_seconds(), 3600)
+ self.assertEqual(value_of('1d').total_seconds(), 86400)
+ self.assertEqual(value_of('1mo').total_seconds(), 2592000)
+ self.assertEqual(value_of('1 month').total_seconds(), 2592000)
+ self.assertEqual(value_of('1y').total_seconds(), 31536000)
+
+ with self.assertRaises(UnknownTimeMeasurement):
+ value_of('1m').total_seconds() # ambiguous between minute and month
+
+ def test_json_from_now_utc_now(self):
+ # Just here to ensure we don't raise.
+ json_time_from_now('1 years')
+
+ def test_json_from_now(self):
+ now = datetime(2014, 1, 1)
+ self.assertEqual(json_time_from_now('1 years', now),
+ '2015-01-01T00:00:00Z')
+ self.assertEqual(json_time_from_now('6 days', now),
+ '2014-01-07T00:00:00Z')
+
+if __name__ == '__main__':
+ mozunit.main()
diff --git a/taskcluster/taskgraph/test/test_util_treeherder.py b/taskcluster/taskgraph/test/test_util_treeherder.py
new file mode 100644
index 000000000..cf7513c00
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_treeherder.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+from taskgraph.util.treeherder import split_symbol, join_symbol
+
+
+class TestSymbols(unittest.TestCase):
+
+ def test_split_no_group(self):
+ self.assertEqual(split_symbol('xy'), ('?', 'xy'))
+
+ def test_split_with_group(self):
+ self.assertEqual(split_symbol('ab(xy)'), ('ab', 'xy'))
+
+ def test_join_no_group(self):
+ self.assertEqual(join_symbol('?', 'xy'), 'xy')
+
+ def test_join_with_group(self):
+ self.assertEqual(join_symbol('ab', 'xy'), 'ab(xy)')
diff --git a/taskcluster/taskgraph/test/test_util_yaml.py b/taskcluster/taskgraph/test/test_util_yaml.py
new file mode 100644
index 000000000..d4ff410db
--- /dev/null
+++ b/taskcluster/taskgraph/test/test_util_yaml.py
@@ -0,0 +1,23 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import unittest
+
+from ..util import yaml
+from mozunit import MockedOpen
+
+FOO_YML = """\
+prop:
+ - val1
+"""
+
+
+class TestYaml(unittest.TestCase):
+
+ def test_load(self):
+ with MockedOpen({'/dir1/dir2/foo.yml': FOO_YML}):
+ self.assertEqual(yaml.load_yaml("/dir1/dir2", "foo.yml"),
+ {'prop': ['val1']})
diff --git a/taskcluster/taskgraph/test/util.py b/taskcluster/taskgraph/test/util.py
new file mode 100644
index 000000000..cf9a49ad3
--- /dev/null
+++ b/taskcluster/taskgraph/test/util.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from ..task import base
+
+
+class TestTask(base.Task):
+
+ def __init__(self, kind=None, label=None, attributes=None, task=None):
+ super(TestTask, self).__init__(
+ kind or 'test',
+ label or 'test-label',
+ attributes or {},
+ task or {})
+
+ @classmethod
+ def load_tasks(cls, kind, path, config, parameters):
+ return []
+
+ def get_dependencies(self, taskgraph):
+ return []
diff --git a/taskcluster/taskgraph/transforms/__init__.py b/taskcluster/taskgraph/transforms/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/__init__.py
diff --git a/taskcluster/taskgraph/transforms/android_stuff.py b/taskcluster/taskgraph/transforms/android_stuff.py
new file mode 100644
index 000000000..cb1e0fa5b
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/android_stuff.py
@@ -0,0 +1,46 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Set dynamic task description properties of the android stuff. Temporary!
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def setup_task(config, tasks):
+ for task in tasks:
+ task['label'] = task['name']
+ env = task['worker'].setdefault('env', {})
+ env.update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REF': config.params['head_rev'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'MH_BRANCH': config.params['project'],
+ })
+
+ task['worker'].setdefault('caches', []).append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-tc-vcs'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/.tc-vcs",
+ })
+
+ if int(config.params['level']) > 1:
+ task['worker'].setdefault('caches', []).append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-build-{}-workspace'.format(
+ config.params['level'], config.params['project'], task['name']),
+ 'mount-point': "/home/worker/workspace",
+ })
+
+ del task['name']
+ yield task
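+
+# For example, at level 3 on the mozilla-central project, a (hypothetical)
+# task named 'foo' gets the caches 'level-3-mozilla-central-tc-vcs' and
+# 'level-3-mozilla-central-build-foo-workspace'; the workspace cache is only
+# added above level 1.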
diff --git a/taskcluster/taskgraph/transforms/base.py b/taskcluster/taskgraph/transforms/base.py
new file mode 100644
index 000000000..aab139252
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/base.py
@@ -0,0 +1,126 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import re
+import pprint
+import voluptuous
+
+
+class TransformConfig(object):
+ """A container for configuration affecting transforms. The `config`
+ argument to transforms is an instance of this class, possibly with
+ additional kind-specific attributes beyond those set here."""
+ def __init__(self, kind, path, config, params):
+ # the name of the current kind
+ self.kind = kind
+
+ # the path to the kind configuration directory
+ self.path = path
+
+ # the parsed contents of kind.yml
+ self.config = config
+
+ # the parameters for this task-graph generation run
+ self.params = params
+
+
+class TransformSequence(object):
+ """
+ Container for a sequence of transforms. Each transform is represented as a
+ callable taking (config, items) and returning a generator which will yield
+ transformed items. The resulting sequence has the same interface.
+
+ This is convenient to use in a file full of transforms, as it provides a
+ decorator, @transforms.add, that will add the decorated function to the
+ sequence.
+ """
+
+ def __init__(self, transforms=None):
+ self.transforms = transforms or []
+
+ def __call__(self, config, items):
+ for xform in self.transforms:
+ items = xform(config, items)
+ if items is None:
+ raise Exception("Transform {} is not a generator".format(xform))
+ return items
+
+ def __repr__(self):
+ return '\n'.join(
+ ['TransformSequence(['] +
+ [repr(x) for x in self.transforms] +
+ ['])'])
+
+ def add(self, func):
+ self.transforms.append(func)
+ return func
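+
+# Illustrative usage only -- 'add_defaults' is a hypothetical transform, not
+# part of this module. A transform file creates one sequence and registers
+# generator functions on it:
+#
+#   transforms = TransformSequence()
+#
+#   @transforms.add
+#   def add_defaults(config, jobs):
+#       for job in jobs:
+#           job.setdefault('tier', 1)
+#           yield job
+#
+# Calling transforms(config, jobs) then applies each registered transform in
+# order, returning a generator of transformed jobs.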
+
+
+def validate_schema(schema, obj, msg_prefix):
+ """
+ Validate that object satisfies schema. If not, generate a useful exception
+ beginning with msg_prefix.
+ """
+ try:
+ return schema(obj)
+ except voluptuous.MultipleInvalid as exc:
+ msg = [msg_prefix]
+ for error in exc.errors:
+ msg.append(str(error))
+ raise Exception('\n'.join(msg) + '\n' + pprint.pformat(obj))
+
+
+def get_keyed_by(item, field, item_name, subfield=None):
+ """
+ For values which can either accept a literal value, or be keyed by some
+ other attribute of the item, perform that lookup. For example, this supports
+
+ chunks:
+ by-test-platform:
+ macosx-10.11/debug: 13
+ win.*: 6
+ default: 12
+
+ The `item_name` parameter is used to generate useful error messages.
+ The `subfield` parameter, if specified, allows access to a second level
+ of the item dictionary: item[field][subfield]. For example, this supports
+
+ mozharness:
+ config:
+ by-test-platform:
+ default: ...
+ """
+ value = item[field]
+ if not isinstance(value, dict):
+ return value
+ if subfield:
+ value = item[field][subfield]
+ if not isinstance(value, dict):
+ return value
+
+ assert len(value) == 1, "Invalid attribute {} in {}".format(field, item_name)
+ keyed_by = value.keys()[0]
+ values = value[keyed_by]
+ if keyed_by.startswith('by-'):
+ keyed_by = keyed_by[3:] # extract just the keyed-by field name
+ # look for an exact key match first, then a regular-expression match,
+ # and finally fall back to an explicit 'default' key
+ if item[keyed_by] in values:
+ return values[item[keyed_by]]
+ for k in values.keys():
+ if re.match(k, item[keyed_by]):
+ return values[k]
+ if 'default' in values:
+ return values['default']
+ raise Exception(
+ "Neither {} {} nor 'default' found while determining item {} in {}".format(
+ keyed_by, item[keyed_by], field, item_name))
+ else:
+ raise Exception(
+ "Invalid attribute {} keyed-by value {} in {}".format(
+ field, keyed_by, item_name))
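+
+# For illustration (the values here are invented): given
+#   item = {'test-platform': 'win7/opt',
+#           'chunks': {'by-test-platform': {'win.*': 6, 'default': 12}}}
+# get_keyed_by(item, 'chunks', 'some-test') returns 6. An exact key match is
+# preferred, then a regular-expression match, then the 'default' key.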
diff --git a/taskcluster/taskgraph/transforms/build.py b/taskcluster/taskgraph/transforms/build.py
new file mode 100644
index 000000000..3875cbbb1
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/build.py
@@ -0,0 +1,31 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Apply some defaults and minor modifications to the jobs defined in the build
+kind.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_defaults(config, jobs):
+ """Set defaults, including those that differ per worker implementation"""
+ for job in jobs:
+ job['treeherder'].setdefault('kind', 'build')
+ job['treeherder'].setdefault('tier', 1)
+ if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
+ job['worker'].setdefault('docker-image', {'in-tree': 'desktop-build'})
+ job['worker']['chain-of-trust'] = True
+ job.setdefault('extra', {})
+ job['extra'].setdefault('chainOfTrust', {})
+ job['extra']['chainOfTrust'].setdefault('inputs', {})
+ job['extra']['chainOfTrust']['inputs']['docker-image'] = {
+ "task-reference": "<docker-image>"
+ }
+ yield job
diff --git a/taskcluster/taskgraph/transforms/build_attrs.py b/taskcluster/taskgraph/transforms/build_attrs.py
new file mode 100644
index 000000000..56c007614
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/build_attrs.py
@@ -0,0 +1,33 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_build_attributes(config, jobs):
+ """
+ Set the build_platform and build_type attributes based on the job name.
+ Although not all jobs using this transform are actual "builds", the try
+ option syntax treats them as such, and this arranges the attributes
+ appropriately for that purpose.
+ """
+ for job in jobs:
+ build_platform, build_type = job['name'].split('/')
+
+ # pgo builds are represented as a different platform, type opt
+ if build_type == 'pgo':
+ build_platform = build_platform + '-pgo'
+ build_type = 'opt'
+
+ attributes = job.setdefault('attributes', {})
+ attributes.update({
+ 'build_platform': build_platform,
+ 'build_type': build_type,
+ })
+
+ yield job
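+
+# For example, a job named 'linux64/pgo' ends up with attributes
+# {'build_platform': 'linux64-pgo', 'build_type': 'opt'}, while
+# 'linux64/debug' keeps {'build_platform': 'linux64', 'build_type': 'debug'}.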
diff --git a/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
new file mode 100644
index 000000000..3817faa50
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
@@ -0,0 +1,77 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This file contains a whitelist of gecko.v2 index route job names. The intent
+of this whitelist is to raise an alarm when new jobs are added. If those jobs
+already run in Buildbot, then it's important that the generated index routes
+match (and that only one of Buildbot and TaskCluster be tier-1 at any time).
+If the jobs are new and never ran in Buildbot, then their job name can be added
+here without any further fuss.
+
+Once all jobs have been ported from Buildbot, this file can be removed.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+# please keep me in lexical order
+JOB_NAME_WHITELIST = set([
+ 'android-api-15-debug',
+ 'android-api-15-gradle-dependencies-opt',
+ 'android-api-15-gradle-opt',
+ 'android-api-15-opt',
+ 'android-api-15-nightly-opt',
+ 'android-api-15-partner-sample1-opt',
+ 'android-l10n-opt',
+ 'android-x86-opt',
+ 'aries-debug',
+ 'aries-eng-opt',
+ 'browser-haz-debug',
+ 'linux32-l10n-opt',
+ 'linux64-artifact-opt',
+ 'linux64-asan-debug',
+ 'linux64-asan-opt',
+ 'linux64-ccov-opt',
+ 'linux64-debug',
+ 'linux64-jsdcov-opt',
+ 'linux64-l10n-opt',
+ 'linux64-opt',
+ 'linux64-pgo',
+ 'linux64-st-an-opt',
+ 'linux64-valgrind-opt',
+ 'linux-debug',
+ 'linux-opt',
+ 'linux-pgo',
+ 'macosx64-debug',
+ 'macosx64-opt',
+ 'macosx64-st-an-opt',
+ 'nexus-5-l-eng-debug',
+ 'nexus-5-l-eng-opt',
+ 'osx-10-10',
+ 'shell-haz-debug',
+ 'sm-arm64-sim-debug',
+ 'sm-arm-sim-debug',
+ 'sm-asan-opt',
+ 'sm-compacting-debug',
+ 'sm-mozjs-sys-debug',
+ 'sm-msan-opt',
+ 'sm-nonunified-debug',
+ 'sm-package-opt',
+ 'sm-plaindebug-debug',
+ 'sm-plain-opt',
+ 'sm-rootanalysis-debug',
+ 'sm-tsan-opt',
+ 'win32-debug',
+ 'win32-opt',
+ 'win32-pgo',
+ 'win64-debug',
+ 'win64-opt',
+ 'win64-pgo',
+])
+
+JOB_NAME_WHITELIST_ERROR = """\
+The gecko-v2 job name {} is not in the whitelist in gecko_v2_whitelist.py.
+If this job runs on Buildbot, please ensure that the job names match between
+Buildbot and TaskCluster, then add the job name to the whitelist. If this is a
+new job, there is nothing to check -- just add the job to the whitelist.
+"""
diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py
new file mode 100644
index 000000000..a0860c032
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -0,0 +1,164 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/job`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+import os
+
+from taskgraph.transforms.base import validate_schema, TransformSequence
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import (
+ Optional,
+ Required,
+ Schema,
+ Extra,
+)
+
+logger = logging.getLogger(__name__)
+
+# Voluptuous uses marker objects as dictionary *keys*, but they are not
+# comparable, so we cast all of the keys back to regular strings
+task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
+
+# Schema for a build description
+job_description_schema = Schema({
+ # The name of the job and the job's label. At least one must be specified,
+ # and the label will be generated from the name if necessary, by prepending
+ # the kind.
+ Optional('name'): basestring,
+ Optional('label'): basestring,
+
+ # the following fields are passed directly through to the task description,
+ # possibly modified by the run implementation. See
+ # taskcluster/taskgraph/transforms/task.py for the schema details.
+ Required('description'): task_description_schema['description'],
+ Optional('attributes'): task_description_schema['attributes'],
+ Optional('dependencies'): task_description_schema['dependencies'],
+ Optional('expires-after'): task_description_schema['expires-after'],
+ Optional('routes'): task_description_schema['routes'],
+ Optional('scopes'): task_description_schema['scopes'],
+ Optional('extra'): task_description_schema['extra'],
+ Optional('treeherder'): task_description_schema['treeherder'],
+ Optional('index'): task_description_schema['index'],
+ Optional('run-on-projects'): task_description_schema['run-on-projects'],
+ Optional('coalesce-name'): task_description_schema['coalesce-name'],
+ Optional('worker-type'): task_description_schema['worker-type'],
+ Required('worker'): task_description_schema['worker'],
+ Optional('when'): task_description_schema['when'],
+
+ # A description of how to run this job.
+ 'run': {
+ # The key to a job implementation in a peer module to this one
+ 'using': basestring,
+
+ # Any remaining content is verified against that job implementation's
+ # own schema.
+ Extra: object,
+ },
+})
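+
+# A job description of roughly this shape would pass the schema above (all
+# field values here are hypothetical, and the elided worker and run fields
+# depend on the chosen implementations):
+#
+#   description: build the thing
+#   worker-type: some-provisioner/some-worker-type
+#   worker:
+#       implementation: docker-worker
+#       ...
+#   run:
+#       using: mozharness
+#       ...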
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def validate(config, jobs):
+ for job in jobs:
+ yield validate_schema(job_description_schema, job,
+ "In job {!r}:".format(job['name']))
+
+
+@transforms.add
+def make_task_description(config, jobs):
+ """Given a build description, create a task description"""
+ # import plugin modules first, before iterating over jobs
+ import_all()
+ for job in jobs:
+ if 'label' not in job:
+ if 'name' not in job:
+ raise Exception("job has neither a name nor a label")
+ job['label'] = '{}-{}'.format(config.kind, job['name'])
+ if job.get('name'):
+ del job['name']
+
+ taskdesc = copy.deepcopy(job)
+
+ # fill in some empty defaults to make run implementations easier
+ taskdesc.setdefault('attributes', {})
+ taskdesc.setdefault('dependencies', {})
+ taskdesc.setdefault('routes', [])
+ taskdesc.setdefault('scopes', [])
+ taskdesc.setdefault('extra', {})
+
+ # give the function for job.run.using on this worker implementation a
+ # chance to set up the task description.
+ configure_taskdesc_for_run(config, job, taskdesc)
+ del taskdesc['run']
+
+ # yield only the task description, discarding the job description
+ yield taskdesc
+
+# A registry of all functions decorated with run_job_using
+registry = {}
+
+
+def run_job_using(worker_implementation, run_using, schema=None):
+ """Register the decorated function as able to set up a task description for
+ jobs with the given worker implementation and `run.using` property. If
+ `schema` is given, the job's run field will be verified to match it.
+
+ The decorated function should have the signature `using_foo(config, job,
+ taskdesc)` and should modify the task description in-place. The skeleton of
+ the task description is already set up, but without a payload."""
+ def wrap(func):
+ for_run_using = registry.setdefault(run_using, {})
+ if worker_implementation in for_run_using:
+ raise Exception("run_job_using({!r}, {!r}) already exists: {!r}".format(
+ run_using, worker_implementation, for_run_using[run_using]))
+ for_run_using[worker_implementation] = (func, schema)
+ return func
+ return wrap
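+# Illustrative only: a handler for a hypothetical "echo" run.using on
+# docker-worker would be registered like
+#
+#   @run_job_using("docker-worker", "echo")
+#   def docker_worker_echo(config, job, taskdesc):
+#       taskdesc['worker']['command'] = ['echo', job['run']['echo']]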
+
+
+def configure_taskdesc_for_run(config, job, taskdesc):
+ """
+ Run the appropriate function for this job against the given task
+ description.
+
+ This will raise an appropriate error if no function exists, or if the job's
+ run is not valid according to the schema.
+ """
+ run_using = job['run']['using']
+ if run_using not in registry:
+ raise Exception("no functions for run.using {!r}".format(run_using))
+
+ worker_implementation = job['worker']['implementation']
+ if worker_implementation not in registry[run_using]:
+ raise Exception("no functions for run.using {!r} on {!r}".format(
+ run_using, worker_implementation))
+
+ func, schema = registry[run_using][worker_implementation]
+ if schema:
+ job['run'] = validate_schema(
+ schema, job['run'],
+ "In job.run using {!r} for job {!r}:".format(
+ job['run']['using'], job['label']))
+
+ func(config, job, taskdesc)
+
+
+def import_all():
+ """Import all modules that are siblings of this one, triggering the decorator
+ above in the process."""
+ for f in os.listdir(os.path.dirname(__file__)):
+ if f.endswith('.py') and f not in ('common.py', '__init__.py'):
+ __import__('taskgraph.transforms.job.' + f[:-3])
diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
new file mode 100644
index 000000000..59a51d75a
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types. These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'
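+# e.g. SECRET_SCOPE.format('build', 3, '*') gives
+# 'secrets:get:project/releng/gecko/build/level-3/*' (illustrative values)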
+
+
+def docker_worker_add_workspace_cache(config, job, taskdesc):
+ """Add the workspace cache based on the build platform/type and level,
+ except on try where workspace caches are not used."""
+ if config.params['project'] == 'try':
+ return
+
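+ # e.g. (illustrative values) level 3 on mozilla-central for a linux64/opt
+ # build yields the cache name
+ # 'level-3-mozilla-central-build-linux64-opt-workspace'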
+ taskdesc['worker'].setdefault('caches', []).append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-build-{}-{}-workspace'.format(
+ config.params['level'], config.params['project'],
+ taskdesc['attributes']['build_platform'],
+ taskdesc['attributes']['build_type'],
+ ),
+ 'mount-point': "/home/worker/workspace",
+ })
+
+
+def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
+ taskdesc['worker'].setdefault('caches', []).append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-tc-vcs'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/.tc-vcs",
+ })
+
+
+def docker_worker_add_public_artifacts(config, job, taskdesc):
+ taskdesc['worker'].setdefault('artifacts', []).append({
+ 'name': 'public/build',
+ 'path': '/home/worker/artifacts/',
+ 'type': 'directory',
+ })
+
+
+def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
+ """Add the GECKO_BASE_* and GECKO_HEAD_* env vars to the worker."""
+ env = taskdesc['worker'].setdefault('env', {})
+ env.update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REF': config.params['head_rev'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ })
+
+
+def docker_worker_support_vcs_checkout(config, job, taskdesc):
+ """Update a job/task with parameters to enable a VCS checkout.
+
+ The configuration is intended for tasks using "run-task" and its
+ VCS checkout behavior.
+ """
+ level = config.params['level']
+
+ taskdesc['worker'].setdefault('caches', []).append({
+ 'type': 'persistent',
+ # History of versions:
+ #
+ # ``level-%s-checkouts`` was initially used and contained a number
+ # of backwards incompatible changes, such as moving HG_STORE_PATH
+ # from a separate cache to this cache.
+ #
+ # ``v1`` was introduced to provide a clean break from the unversioned
+ # cache.
+ 'name': 'level-%s-checkouts-v1' % level,
+ 'mount-point': '/home/worker/checkouts',
+ })
+
+ taskdesc['worker'].setdefault('env', {}).update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ 'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
+ })
+
+ # Give task access to hgfingerprint secret so it can pin the certificate
+ # for hg.mozilla.org.
+ taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
+ taskdesc['worker']['taskcluster-proxy'] = True
+
+
+def docker_worker_setup_secrets(config, job, taskdesc):
+ """Set up access to secrets via taskcluster-proxy. The value of
+ run['secrets'] should be a boolean or a list of secret names that
+ can be accessed."""
+ if not job['run'].get('secrets'):
+ return
+
+ taskdesc['worker']['taskcluster-proxy'] = True
+ secrets = job['run']['secrets']
+ if secrets is True:
+ secrets = ['*']
+ for sec in secrets:
+ taskdesc['scopes'].append(SECRET_SCOPE.format(
+ job['treeherder']['kind'], config.params['level'], sec))
diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py
new file mode 100644
index 000000000..c5b500843
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_workspace_cache,
+ docker_worker_setup_secrets,
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
+haz_run_schema = Schema({
+ Required('using'): 'hazard',
+
+ # The command to run within the task image (passed through to the worker)
+ Required('command'): basestring,
+
+ # The tooltool manifest to use; default in the script is used if omitted
+ Optional('tooltool-manifest'): basestring,
+
+ # The mozconfig to use; default in the script is used if omitted
+ Optional('mozconfig'): basestring,
+
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required('secrets', default=False): Any(bool, [basestring]),
+})
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
+def docker_worker_hazard(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['artifacts'] = []
+ worker['caches'] = []
+
+ docker_worker_add_public_artifacts(config, job, taskdesc)
+ docker_worker_add_workspace_cache(config, job, taskdesc)
+ docker_worker_setup_secrets(config, job, taskdesc)
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ # script parameters
+ if run.get('tooltool-manifest'):
+ env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
+ if run.get('mozconfig'):
+ env['MOZCONFIG'] = run['mozconfig']
+
+ # tooltool downloads
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ worker['relengapi-proxy'] = True
+ taskdesc['scopes'].extend([
+ 'docker-worker:relengapi-proxy:tooltool.download.public',
+ ])
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+ env['TOOLTOOL_REV'] = 'master'
+
+ # build-haz-linux.sh needs this otherwise it assumes the checkout is in
+ # the workspace.
+ env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
+
+ worker['command'] = [
+ '/home/worker/bin/run-task',
+ '--chown-recursive', '/home/worker/tooltool-cache',
+ '--chown-recursive', '/home/worker/workspace',
+ '--vcs-checkout', '/home/worker/checkouts/gecko',
+ '--',
+ '/bin/bash', '-c', run['command']
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py
new file mode 100644
index 000000000..8df202dbc
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.run_task import docker_worker_run_task
+from voluptuous import Schema, Required
+
+mach_schema = Schema({
+ Required('using'): 'mach',
+
+ # The mach command (omitting `./mach`) to run
+ Required('mach'): basestring,
+})
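+# Illustrative example: a job whose run section is
+# {'using': 'mach', 'mach': 'python-test --subsuite foo'} (hypothetical
+# command) is rewritten below into a 'run-task' job whose command is
+# "cd /home/worker/checkouts/gecko && ./mach python-test --subsuite foo".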
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema)
+def docker_worker_mach(config, job, taskdesc):
+ run = job['run']
+
+ # defer to the run_task implementation
+ run['command'] = 'cd /home/worker/checkouts/gecko && ./mach ' + run['mach']
+ run['checkout'] = True
+ del run['mach']
+ docker_worker_run_task(config, job, taskdesc)
diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py
new file mode 100644
index 000000000..fb3cd00dd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,226 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness. Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_workspace_cache,
+ docker_worker_add_gecko_vcs_env_vars,
+ docker_worker_setup_secrets,
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
+COALESCE_KEY = 'builds.{project}.{name}'
+
+mozharness_run_schema = Schema({
+ Required('using'): 'mozharness',
+
+ # the mozharness script used to run this task, relative to the testing/
+ # directory and using forward slashes even on Windows
+ Required('script'): basestring,
+
+ # the config files required for the task, relative to
+ # testing/mozharness/configs and using forward slashes even on Windows
+ Required('config'): [basestring],
+
+ # any additional actions to pass to the mozharness command; not supported
+ # on Windows
+ Optional('actions'): [basestring],
+
+ # any additional options (without leading --) to be passed to mozharness;
+ # not supported on Windows
+ Optional('options'): [basestring],
+
+ # --custom-build-variant-cfg value (not supported on Windows)
+ Optional('custom-build-variant-cfg'): basestring,
+
+ # If not false, tooltool downloads will be enabled via relengAPIProxy
+ # for either just public files, or all files. Not supported on Windows
+ Required('tooltool-downloads', default=False): Any(
+ False,
+ 'public',
+ 'internal',
+ ),
+
+ # The set of secret names to which the task has access; these are prefixed
+ # with `project/releng/gecko/{treeherder.kind}/level-{level}/`. Setting
+ # this will enable any worker features required and set the task's scopes
+ # appropriately. `true` here means ['*'], all secrets. Not supported on
+ # Windows
+ Required('secrets', default=False): Any(bool, [basestring]),
+
+ # If true, taskcluster proxy will be enabled; note that it may also be enabled
+ # automatically e.g., for secrets support. Not supported on Windows.
+ Required('taskcluster-proxy', default=False): bool,
+
+ # If true, the build scripts will start Xvfb. Not supported on Windows.
+ Required('need-xvfb', default=False): bool,
+
+ # If false, indicate that builds should skip producing artifacts. Not
+ # supported on Windows.
+ Required('keep-artifacts', default=True): bool,
+
+ # If specified, use the in-tree job script specified.
+ Optional('job-script'): basestring,
+})
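+# Illustrative run section validated by this schema (values are examples
+# only):
+#
+#   run:
+#     using: mozharness
+#     script: mozharness/scripts/fx_desktop_build.py
+#     config: [builds/releng_base_linux_64_builds.py]
+#     tooltool-downloads: public
+#     secrets: true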
+
+
+@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['implementation'] = job['worker']['implementation']
+
+ # running via mozharness assumes desktop-build (which contains build.sh)
+ taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}
+
+ worker['relengapi-proxy'] = False # may be enabled for tooltool below
+ worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
+
+ docker_worker_add_public_artifacts(config, job, taskdesc)
+ docker_worker_add_workspace_cache(config, job, taskdesc)
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ env = worker.setdefault('env', {})
+ env.update({
+ 'MOZHARNESS_CONFIG': ' '.join(run['config']),
+ 'MOZHARNESS_SCRIPT': run['script'],
+ 'MH_BRANCH': config.params['project'],
+ 'MH_BUILD_POOL': 'taskcluster',
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ if 'actions' in run:
+ env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
+
+ if 'options' in run:
+ env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
+
+ if 'custom-build-variant-cfg' in run:
+ env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']
+
+ if 'job-script' in run:
+ env['JOB_SCRIPT'] = run['job-script']
+
+ # if we're not keeping artifacts, set some env variables to empty values
+ # that will cause the build process to skip copying the results to the
+ # artifacts directory. This will have no effect for operations that are
+ # not builds.
+ if not run['keep-artifacts']:
+ env['DIST_TARGET_UPLOADS'] = ''
+ env['DIST_UPLOADS'] = ''
+
+ # Xvfb
+ if run['need-xvfb']:
+ env['NEED_XVFB'] = 'true'
+
+ # tooltool downloads
+ if run['tooltool-downloads']:
+ worker['relengapi-proxy'] = True
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ taskdesc['scopes'].extend([
+ 'docker-worker:relengapi-proxy:tooltool.download.public',
+ ])
+ if run['tooltool-downloads'] == 'internal':
+ taskdesc['scopes'].append(
+ 'docker-worker:relengapi-proxy:tooltool.download.internal')
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+ env['TOOLTOOL_REV'] = 'master'
+
+ # Retry if mozharness returns TBPL_RETRY
+ worker['retry-exit-status'] = 4
+
+ docker_worker_setup_secrets(config, job, taskdesc)
+
+ command = [
+ '/home/worker/bin/run-task',
+ # Various caches/volumes are default owned by root:root.
+ '--chown-recursive', '/home/worker/workspace',
+ '--chown-recursive', '/home/worker/tooltool-cache',
+ '--vcs-checkout', '/home/worker/workspace/build/src',
+ '--tools-checkout', '/home/worker/workspace/build/tools',
+ '--',
+ ]
+ command.append("/home/worker/workspace/build/src/{}".format(
+ run.get('job-script',
+ "taskcluster/scripts/builder/build-linux.sh"
+ )))
+
+ worker['command'] = command
+
+
+# We use the generic worker to run tasks on Windows
+@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_windows(config, job, taskdesc):
+ run = job['run']
+
+ # fail if invalid run options are included
+ invalid = []
+ for prop in ['actions', 'custom-build-variant-cfg',
+ 'tooltool-downloads', 'secrets', 'taskcluster-proxy',
+ 'need-xvfb']:
+ if prop in run and run[prop]:
+ invalid.append(prop)
+ if not run.get('keep-artifacts', True):
+ invalid.append('keep-artifacts')
+ if invalid:
+ raise Exception("Jobs run using mozharness on Windows do not support properties " +
+ ', '.join(invalid))
+
+ worker = taskdesc['worker']
+
+ worker['artifacts'] = [{
+ 'path': r'public\build',
+ 'type': 'directory',
+ }]
+
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+ 'TOOLTOOL_REV': 'master',
+ })
+
+ mh_command = [r'c:\mozilla-build\python\python.exe']
+ mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
+ for cfg in run['config']:
+ mh_command.append('--config ' + cfg.replace('/', '\\'))
+ mh_command.append('--branch ' + config.params['project'])
+ mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
+ for option in run.get('options', []):
+ mh_command.append('--' + option)
+
+ hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
+ hg_command.append('robustcheckout')
+ hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+ hg_command.append('--purge')
+ hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+ hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
+ hg_command.append(env['GECKO_HEAD_REPOSITORY'])
+ hg_command.append('.\\build\\src')
+
+ worker['command'] = [
+ ' '.join(hg_command),
+ ' '.join(mh_command)
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
new file mode 100644
index 000000000..296fe43ee
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
+from voluptuous import Schema, Required, Any
+
+run_task_schema = Schema({
+ Required('using'): 'run-task',
+
+ # if true, add a cache at ~worker/.cache, which is where things like pip
+ # tend to hide their caches. This cache is never added for level-1 jobs.
+ Required('cache-dotcache', default=False): bool,
+
+ # if true (the default), perform a checkout in /home/worker/checkouts/gecko
+ Required('checkout', default=True): bool,
+
+ # The command arguments to pass to the `run-task` script, after the
+ # checkout arguments. If a list, it will be passed directly; otherwise
+ # it will be included in a single argument to `bash -cx`.
+ Required('command'): Any([basestring], basestring),
+})
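+# Illustrative run section:
+#   {'using': 'run-task', 'cache-dotcache': True,
+#    'command': 'cd /home/worker/checkouts/gecko && ./mach lint'}
+# yields a run-task invocation that performs a gecko checkout and runs the
+# command under `bash -cx` (the mach command here is hypothetical).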
+
+
+@run_job_using("docker-worker", "run-task", schema=run_task_schema)
+def docker_worker_run_task(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker'] = copy.deepcopy(job['worker'])
+
+ if run['checkout']:
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ if run.get('cache-dotcache') and int(config.params['level']) > 1:
+ worker.setdefault('caches', []).append({
+ 'type': 'persistent',
+ 'name': 'level-{level}-{project}-dotcache'.format(**config.params),
+ 'mount-point': '/home/worker/.cache',
+ })
+
+ run_command = run['command']
+ if isinstance(run_command, basestring):
+ run_command = ['bash', '-cx', run_command]
+ command = ['/home/worker/bin/run-task']
+ if run['checkout']:
+ command.append('--vcs-checkout=/home/worker/checkouts/gecko')
+ command.append('--')
+ command.extend(run_command)
+ worker['command'] = command
diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 000000000..d78b78504
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_public_artifacts,
+ docker_worker_support_vcs_checkout,
+)
+
+sm_run_schema = Schema({
+ Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate'),
+
+ # The SPIDERMONKEY_VARIANT
+ Required('spidermonkey-variant'): basestring,
+
+ # The tooltool manifest to use; default from sm-tooltool-config.sh is used
+ # if omitted
+ Optional('tooltool-manifest'): basestring,
+})
+
+
+@run_job_using("docker-worker", "spidermonkey")
+@run_job_using("docker-worker", "spidermonkey-package")
+@run_job_using("docker-worker", "spidermonkey-mozjs-crate")
+def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['artifacts'] = []
+ worker['caches'] = []
+
+ if int(config.params['level']) > 1:
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/workspace",
+ })
+
+ docker_worker_add_public_artifacts(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZHARNESS_DISABLE': 'true',
+ 'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ # tooltool downloads; note that this script downloads using the API
+ # endpoint directly, rather than via relengapi-proxy
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ if run.get('tooltool-manifest'):
+ env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
+
+ docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+ script = "build-sm.sh"
+ if run['using'] == 'spidermonkey-package':
+ script = "build-sm-package.sh"
+ elif run['using'] == 'spidermonkey-mozjs-crate':
+ script = "build-sm-mozjs-crate.sh"
+
+ worker['command'] = [
+ '/home/worker/bin/run-task',
+ '--chown-recursive', '/home/worker/workspace',
+ '--chown-recursive', '/home/worker/tooltool-cache',
+ '--vcs-checkout', '/home/worker/workspace/build/src',
+ '--',
+ '/bin/bash',
+ '-c',
+ 'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
+ ]
diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py
new file mode 100644
index 000000000..d814f7824
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+ docker_worker_add_tc_vcs_cache,
+ docker_worker_add_gecko_vcs_env_vars
+)
+
+toolchain_run_schema = Schema({
+ Required('using'): 'toolchain-script',
+
+ # the script (in taskcluster/scripts/misc) to run
+ Required('script'): basestring,
+})
+
+
+@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
+def docker_worker_toolchain(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+ worker['artifacts'] = []
+ worker['caches'] = []
+
+ worker['artifacts'].append({
+ 'name': 'public',
+ 'path': '/home/worker/workspace/artifacts/',
+ 'type': 'directory',
+ })
+
+ docker_worker_add_tc_vcs_cache(config, job, taskdesc)
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLS_DISABLE': 'true',
+ })
+
+ # tooltool downloads; note that this downloads using the API endpoint directly,
+ # rather than via relengapi-proxy
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+ env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+ env['TOOLTOOL_REV'] = 'master'
+
+ command = ' && '.join([
+ "cd /home/worker/",
+ "./bin/checkout-sources.sh",
+ "./workspace/build/src/taskcluster/scripts/misc/" + run['script'],
+ ])
+ worker['command'] = ["/bin/bash", "-c", command]
+
+
+@run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
+def windows_toolchain(config, job, taskdesc):
+ run = job['run']
+
+ worker = taskdesc['worker']
+
+ worker['artifacts'] = [{
+ 'path': r'public\build',
+ 'type': 'directory',
+ }]
+
+ docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+ # We fetch LLVM SVN into this.
+ svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
+ worker['mounts'] = [{
+ 'cache-name': svn_cache,
+ 'path': r'llvm-sources',
+ }]
+ taskdesc['scopes'].extend([
+ 'generic-worker:cache:' + svn_cache,
+ ])
+
+ env = worker['env']
+ env.update({
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ 'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+ 'TOOLTOOL_REV': 'master',
+ })
+
+ hg = r'c:\Program Files\Mercurial\hg.exe'
+ hg_command = ['"{}"'.format(hg)]
+ hg_command.append('robustcheckout')
+ hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+ hg_command.append('--purge')
+ hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+ hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
+ hg_command.append('%GECKO_HEAD_REPOSITORY%')
+ hg_command.append('.\\build\\src')
+
+ bash = r'c:\mozilla-build\msys\bin\bash'
+ worker['command'] = [
+ ' '.join(hg_command),
+ # run the toolchain script under msys bash
+ r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
+ ]
diff --git a/taskcluster/taskgraph/transforms/l10n.py b/taskcluster/taskgraph/transforms/l10n.py
new file mode 100644
index 000000000..42137b558
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/l10n.py
@@ -0,0 +1,44 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transforms specific to the l10n kind.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def mh_config_replace_project(config, jobs):
+ """ Replaces {project} in mh config entries with the current project """
+ # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
+ for job in jobs:
+ if job['run'].get('using') != 'mozharness':
+ # Nothing to do, not mozharness
+ yield job
+ continue
+ job['run']['config'] = map(
+ lambda x: x.format(project=config.params['project']),
+ job['run']['config']
+ )
+ yield job
+
+
+@transforms.add
+def mh_options_replace_project(config, jobs):
+ """ Replaces {project} in mh option entries with the current project """
+ # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
+ for job in jobs:
+ if job['run'].get('using') != 'mozharness':
+ # Nothing to do, not mozharness
+ yield job
+ continue
+ job['run']['options'] = map(
+ lambda x: x.format(project=config.params['project']),
+ job['run']['options']
+ )
+ yield job
diff --git a/taskcluster/taskgraph/transforms/marionette_harness.py b/taskcluster/taskgraph/transforms/marionette_harness.py
new file mode 100644
index 000000000..a24db470c
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/marionette_harness.py
@@ -0,0 +1,37 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Set dynamic task description properties of the marionette-harness task.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def setup_task(config, tasks):
+ for task in tasks:
+ del task['name']
+ task['label'] = 'marionette-harness'
+ env = task['worker'].setdefault('env', {})
+ env.update({
+ 'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+ 'GECKO_HEAD_REF': config.params['head_rev'],
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ 'MOZ_BUILD_DATE': config.params['moz_build_date'],
+ 'MOZ_SCM_LEVEL': config.params['level'],
+ })
+
+ task['worker']['caches'] = [{
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-tc-vcs'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/.tc-vcs",
+ }]
+
+ yield task
diff --git a/taskcluster/taskgraph/transforms/task.py b/taskcluster/taskgraph/transforms/task.py
new file mode 100644
index 000000000..6e371e4ba
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -0,0 +1,648 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transformations take a task description and turn it into a TaskCluster
+task definition (along with attributes, label, etc.). The input to these
+transformations is generic to any kind of task, but abstracts away some of the
+complexities of worker implementations, scopes, and treeherder annotations.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import json
+import time
+
+from taskgraph.util.treeherder import split_symbol
+from taskgraph.transforms.base import (
+ validate_schema,
+ TransformSequence
+)
+from voluptuous import Schema, Any, Required, Optional, Extra
+
+from .gecko_v2_whitelist import JOB_NAME_WHITELIST, JOB_NAME_WHITELIST_ERROR
+
+# shortcut for a string where task references are allowed
+taskref_or_string = Any(
+ basestring,
+ {Required('task-reference'): basestring})
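+# e.g. a plain string, or a task reference such as
+# {'task-reference': '<docker-image>'} (see build_docker_worker_payload below)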
+
+# A task description is a general description of a TaskCluster task
+task_description_schema = Schema({
+ # the label for this task
+ Required('label'): basestring,
+
+ # description of the task (for metadata)
+ Required('description'): basestring,
+
+ # attributes for this task
+ Optional('attributes'): {basestring: object},
+
+ # dependencies of this task, keyed by name; these are passed through
+ # verbatim and subject to the interpretation of the Task's get_dependencies
+ # method.
+ Optional('dependencies'): {basestring: object},
+
+ # expiration and deadline times, relative to task creation, with units
+ # (e.g., "14 days"). Defaults are set based on the project.
+ Optional('expires-after'): basestring,
+ Optional('deadline-after'): basestring,
+
+ # custom routes for this task; the default treeherder routes will be added
+ # automatically
+ Optional('routes'): [basestring],
+
+ # custom scopes for this task; any scopes required for the worker will be
+ # added automatically
+ Optional('scopes'): [basestring],
+
+ # custom "task.extra" content
+ Optional('extra'): {basestring: object},
+
+ # treeherder-related information; see
+ # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
+ # If not specified, no treeherder extra information or routes will be
+ # added to the task
+ Optional('treeherder'): {
+ # either a bare symbol, or "grp(sym)".
+ 'symbol': basestring,
+
+ # the job kind
+ 'kind': Any('build', 'test', 'other'),
+
+ # tier for this task
+ 'tier': int,
+
+ # task platform, in the form platform/collection, used to set
+ # treeherder.machine.platform and treeherder.collection or
+ # treeherder.labels
+ 'platform': basestring,
+
+ # treeherder environments (defaults to both staging and production)
+ Required('environments', default=['production', 'staging']): ['production', 'staging'],
+ },
+
+ # information for indexing this build so its artifacts can be discovered;
+ # if omitted, the build will not be indexed.
+ Optional('index'): {
+ # the name of the product this build produces
+ 'product': Any('firefox', 'mobile'),
+
+ # the names to use for this job in the TaskCluster index
+ 'job-name': Any(
+ # Assuming the job is named "normally", this is the v2 job name,
+ # and the v1 and buildbot routes will be determined appropriately.
+ basestring,
+
+ # otherwise, give separate names for each of the legacy index
+ # routes; if a name is omitted, no corresponding route will be
+ # created.
+ {
+ # the name as it appears in buildbot routes
+ Optional('buildbot'): basestring,
+ Required('gecko-v2'): basestring,
+ }
+ ),
+
+ # The rank that the task will receive in the TaskCluster
+ # index. A newly completed task supersedes the currently
+ # indexed task iff it has a higher rank. If unspecified,
+ # 'by-tier' behavior will be used.
+ 'rank': Any(
+ # Rank is equal to the timestamp of the build_date for tier-1
+ # tasks, and zero for non-tier-1. This sorts tier-{2,3}
+ # builds below tier-1 in the index.
+ 'by-tier',
+
+ # Rank is given as an integer constant (e.g. zero to make
+ # sure a task is last in the index).
+ int,
+
+ # Rank is equal to the timestamp of the build_date. This
+ # option can be used to override the 'by-tier' behavior
+ # for non-tier-1 tasks.
+ 'build_date',
+ ),
+ },
+
+ # The `run_on_projects` attribute, defaulting to "all". This dictates the
+ # projects on which this task should be included in the target task set.
+ # See the attributes documentation for details.
+ Optional('run-on-projects'): [basestring],
+
+ # If the task can be coalesced, this is the name used in the coalesce key;
+ # the project, etc. will be added automatically. Note that try (level 1)
+ # tasks are never coalesced.
+ Optional('coalesce-name'): basestring,
+
+ # the provisioner-id/worker-type for the task. The following parameters will
+ # be substituted in this string:
+ # {level} -- the scm level of this push
+ 'worker-type': basestring,
+
+ # information specific to the worker implementation that will run this task
+ 'worker': Any({
+ Required('implementation'): Any('docker-worker', 'docker-engine'),
+
+ # For tasks that will run in docker-worker or docker-engine, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required('docker-image'): Any(
+ # a raw Docker image path (repo/image:tag)
+ basestring,
+ # an in-tree generated docker image (from `testing/docker/<name>`)
+ {'in-tree': basestring}
+ ),
+
+ # worker features that should be enabled
+ Required('relengapi-proxy', default=False): bool,
+ Required('chain-of-trust', default=False): bool,
+ Required('taskcluster-proxy', default=False): bool,
+ Required('allow-ptrace', default=False): bool,
+ Required('loopback-video', default=False): bool,
+ Required('loopback-audio', default=False): bool,
+
+ # caches to set up for the task
+ Optional('caches'): [{
+ # only one type is supported by any of the workers right now
+ 'type': 'persistent',
+
+ # name of the cache, allowing re-use by subsequent tasks naming the
+ # same cache
+ 'name': basestring,
+
+ # location in the task image where the cache will be mounted
+ 'mount-point': basestring,
+ }],
+
+ # artifacts to extract from the task image after completion
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ 'type': Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ 'path': basestring,
+
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ 'name': basestring,
+ }],
+
+ # environment variables
+ Required('env', default={}): {basestring: taskref_or_string},
+
+ # the command to run
+ 'command': [taskref_or_string],
+
+ # the maximum time to run, in seconds
+ 'max-run-time': int,
+
+ # the exit status code that indicates the task should be retried
+ Optional('retry-exit-status'): int,
+
+ }, {
+ Required('implementation'): 'generic-worker',
+
+ # command is a list of commands to run, sequentially
+ 'command': [taskref_or_string],
+
+ # artifacts to extract from the task image after completion; note that artifacts
+ # for the generic worker cannot have names
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ 'type': Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ 'path': basestring,
+ }],
+
+ # directories and/or files to be mounted
+ Optional('mounts'): [{
+ # a unique name for the cache volume
+ 'cache-name': basestring,
+
+ # task image path for the cache
+ 'path': basestring,
+ }],
+
+ # environment variables
+ Required('env', default={}): {basestring: taskref_or_string},
+
+ # the maximum time to run, in seconds
+ 'max-run-time': int,
+
+ # os user groups for test task workers
+ Optional('os-groups', default=[]): [basestring],
+ }, {
+ Required('implementation'): 'buildbot-bridge',
+
+ # see
+ # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
+ 'buildername': basestring,
+ 'sourcestamp': {
+ 'branch': basestring,
+ Optional('revision'): basestring,
+ Optional('repository'): basestring,
+ Optional('project'): basestring,
+ },
+ 'properties': {
+ 'product': basestring,
+ Extra: basestring, # additional properties are allowed
+ },
+ }, {
+ 'implementation': 'macosx-engine',
+
+ # A link for an executable to download
+ Optional('link'): basestring,
+
+ # the command to run
+ Required('command'): [taskref_or_string],
+
+ # environment variables
+ Optional('env'): {basestring: taskref_or_string},
+
+ # artifacts to extract from the task image after completion
+ Optional('artifacts'): [{
+ # type of artifact -- simple file, or recursive directory
+ Required('type'): Any('file', 'directory'),
+
+ # task image path from which to read artifact
+ Required('path'): basestring,
+
+ # name of the produced artifact (root of the names for
+ # type=directory)
+ Required('name'): basestring,
+ }],
+ }),
+
+ # The "when" section contains descriptions of the circumstances
+ # under which this task can be "optimized", that is, left out of the
+ # task graph because it is unnecessary.
+ Optional('when'): Any({
+ # This task only needs to be run if a file matching one of the given
+ # patterns has changed in the push. The patterns use the mozpack
+ # match function (python/mozbuild/mozpack/path.py).
+ Optional('files-changed'): [basestring],
+ }),
+})
+
+GROUP_NAMES = {
+ 'tc': 'Executed by TaskCluster',
+ 'tc-e10s': 'Executed by TaskCluster with e10s',
+ 'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
+ 'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
+ 'tc-Fxfn-r': 'Firefox functional tests (remote) executed by TaskCluster',
+ 'tc-Fxfn-r-e10s': 'Firefox functional tests (remote) executed by TaskCluster with e10s',
+ 'tc-M': 'Mochitests executed by TaskCluster',
+ 'tc-M-e10s': 'Mochitests executed by TaskCluster with e10s',
+ 'tc-R': 'Reftests executed by TaskCluster',
+ 'tc-R-e10s': 'Reftests executed by TaskCluster with e10s',
+ 'tc-VP': 'VideoPuppeteer tests executed by TaskCluster',
+ 'tc-W': 'Web platform tests executed by TaskCluster',
+ 'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
+ 'tc-X': 'Xpcshell tests executed by TaskCluster',
+ 'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
+ 'Aries': 'Aries Device Image',
+ 'Nexus 5-L': 'Nexus 5-L Device Image',
+ 'Cc': 'Toolchain builds',
+ 'SM-tc': 'Spidermonkey builds',
+}
+UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
+
+BUILDBOT_ROUTE_TEMPLATES = [
+ "index.buildbot.branches.{project}.{job-name-buildbot}",
+ "index.buildbot.revisions.{head_rev}.{project}.{job-name-buildbot}",
+]
+
+V2_ROUTE_TEMPLATES = [
+ "index.gecko.v2.{project}.latest.{product}.{job-name-gecko-v2}",
+ "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name-gecko-v2}",
+ "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name-gecko-v2}",
+]
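+# e.g. (illustrative values) the first template expands to something like
+# "index.gecko.v2.mozilla-central.latest.firefox.linux64-opt"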
+
+# the roots of the treeherder routes, keyed by treeherder environment
+TREEHERDER_ROUTE_ROOTS = {
+ 'production': 'tc-treeherder',
+ 'staging': 'tc-treeherder-stage',
+}
+
+COALESCE_KEY = 'builds.{project}.{name}'
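+# e.g. COALESCE_KEY.format(project='mozilla-inbound', name='linux64-opt')
+# gives 'builds.mozilla-inbound.linux64-opt' (illustrative values)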
+
+# define a collection of payload builders, depending on the worker implementation
+payload_builders = {}
+
+
+def payload_builder(name):
+ def wrap(func):
+ payload_builders[name] = func
+ return func
+ return wrap
+
+
+@payload_builder('docker-worker')
+def build_docker_worker_payload(config, task, task_def):
+ worker = task['worker']
+
+ image = worker['docker-image']
+ if isinstance(image, dict):
+ docker_image_task = 'build-docker-image-' + image['in-tree']
+ task.setdefault('dependencies', {})['docker-image'] = docker_image_task
+ image = {
+ "path": "public/image.tar.zst",
+ "taskId": {"task-reference": "<docker-image>"},
+ "type": "task-image",
+ }
+
+ features = {}
+
+ if worker.get('relengapi-proxy'):
+ features['relengAPIProxy'] = True
+
+ if worker.get('taskcluster-proxy'):
+ features['taskclusterProxy'] = True
+
+ if worker.get('allow-ptrace'):
+ features['allowPtrace'] = True
+ task_def['scopes'].append('docker-worker:feature:allowPtrace')
+
+ if worker.get('chain-of-trust'):
+ features['chainOfTrust'] = True
+
+ capabilities = {}
+
+ for lo in 'audio', 'video':
+ if worker.get('loopback-' + lo):
+ capitalized = 'loopback' + lo.capitalize()
+ devices = capabilities.setdefault('devices', {})
+ devices[capitalized] = True
+ task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
+
+ task_def['payload'] = payload = {
+ 'command': worker['command'],
+ 'image': image,
+ 'env': worker['env'],
+ }
+
+ if 'max-run-time' in worker:
+ payload['maxRunTime'] = worker['max-run-time']
+
+ if 'retry-exit-status' in worker:
+ payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
+
+ if 'artifacts' in worker:
+ artifacts = {}
+ for artifact in worker['artifacts']:
+ artifacts[artifact['name']] = {
+ 'path': artifact['path'],
+ 'type': artifact['type'],
+ 'expires': task_def['expires'], # always expire with the task
+ }
+ payload['artifacts'] = artifacts
+
+ if 'caches' in worker:
+ caches = {}
+ for cache in worker['caches']:
+ caches[cache['name']] = cache['mount-point']
+ task_def['scopes'].append('docker-worker:cache:' + cache['name'])
+ payload['cache'] = caches
+
+ if features:
+ payload['features'] = features
+ if capabilities:
+ payload['capabilities'] = capabilities
+
+ # coalesce / superseding
+ if 'coalesce-name' in task and int(config.params['level']) > 1:
+ key = COALESCE_KEY.format(
+ project=config.params['project'],
+ name=task['coalesce-name'])
+ payload['supersederUrl'] = "https://coalesce.mozilla-releng.net/v1/list/" + key
+
+
+@payload_builder('generic-worker')
+def build_generic_worker_payload(config, task, task_def):
+ worker = task['worker']
+
+ artifacts = []
+
+ for artifact in worker['artifacts']:
+ artifacts.append({
+ 'path': artifact['path'],
+ 'type': artifact['type'],
+ 'expires': task_def['expires'], # always expire with the task
+ })
+
+ mounts = []
+
+ for mount in worker.get('mounts', []):
+ mounts.append({
+ 'cacheName': mount['cache-name'],
+ 'directory': mount['path']
+ })
+
+ task_def['payload'] = {
+ 'command': worker['command'],
+ 'artifacts': artifacts,
+ 'env': worker.get('env', {}),
+ 'mounts': mounts,
+ 'maxRunTime': worker['max-run-time'],
+ 'osGroups': worker.get('os-groups', []),
+ }
+
+ if 'retry-exit-status' in worker:
+ raise Exception("retry-exit-status not supported in generic-worker")
+
+
+@payload_builder('macosx-engine')
+def build_macosx_engine_payload(config, task, task_def):
+ worker = task['worker']
+ artifacts = map(lambda artifact: {
+ 'name': artifact['name'],
+ 'path': artifact['path'],
+ 'type': artifact['type'],
+ 'expires': task_def['expires'],
+ }, worker['artifacts'])
+
+ task_def['payload'] = {
+ 'link': worker['link'],
+ 'command': worker['command'],
+ 'env': worker['env'],
+ 'artifacts': artifacts,
+ }
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def validate(config, tasks):
+ for task in tasks:
+ yield validate_schema(
+ task_description_schema, task,
+ "In task {!r}:".format(task.get('label', '?no-label?')))
+
+
+@transforms.add
+def add_index_routes(config, tasks):
+ for task in tasks:
+ index = task.get('index')
+ routes = task.setdefault('routes', [])
+
+ if not index:
+ yield task
+ continue
+
+ job_name = index['job-name']
+ # unpack the v2 name to v1 and buildbot names
+ if isinstance(job_name, basestring):
+ base_name, type_name = job_name.rsplit('-', 1)
+ job_name = {
+ 'buildbot': base_name,
+ 'gecko-v2': '{}-{}'.format(base_name, type_name),
+ }
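+ # e.g. (illustrative) job-name 'linux64-opt' unpacks to
+ # {'buildbot': 'linux64', 'gecko-v2': 'linux64-opt'}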
+
+ if job_name['gecko-v2'] not in JOB_NAME_WHITELIST:
+ raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name['gecko-v2']))
+
+ subs = config.params.copy()
+ for n in job_name:
+ subs['job-name-' + n] = job_name[n]
+ subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
+ time.gmtime(config.params['build_date']))
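+ # build_date_long has the form year.month.day.pushdate, e.g.
+ # "2017.01.31.20170131103000" (illustrative)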
+ subs['product'] = index['product']
+
+ if 'buildbot' in job_name:
+ for tpl in BUILDBOT_ROUTE_TEMPLATES:
+ routes.append(tpl.format(**subs))
+ if 'gecko-v2' in job_name:
+ for tpl in V2_ROUTE_TEMPLATES:
+ routes.append(tpl.format(**subs))
+
+ # The default behavior is to rank tasks according to their tier
+ extra_index = task.setdefault('extra', {}).setdefault('index', {})
+ rank = index.get('rank', 'by-tier')
+
+ if rank == 'by-tier':
+ # rank is zero for non-tier-1 tasks and the build date for others;
+ # this sorts tier-{2,3} builds below tier-1 in the index
+ tier = task.get('treeherder', {}).get('tier', 3)
+ extra_index['rank'] = 0 if tier > 1 else int(config.params['build_date'])
+ elif rank == 'build_date':
+ extra_index['rank'] = int(config.params['build_date'])
+ else:
+ extra_index['rank'] = rank
+
+ del task['index']
+ yield task
+
+
+@transforms.add
+def build_task(config, tasks):
+ for task in tasks:
+ worker_type = task['worker-type'].format(level=str(config.params['level']))
+ provisioner_id, worker_type = worker_type.split('/', 1)
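+ # e.g. (illustrative) 'aws-provisioner-v1/gecko-{level}-b-linux' at level 3
+ # splits into provisionerId 'aws-provisioner-v1' and workerType 'gecko-3-b-linux'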
+
+ routes = task.get('routes', [])
+ scopes = task.get('scopes', [])
+
+ # set up extra
+ extra = task.get('extra', {})
+ task_th = task.get('treeherder')
+ if task_th:
+ extra['treeherderEnv'] = task_th['environments']
+
+ treeherder = extra.setdefault('treeherder', {})
+
+ machine_platform, collection = task_th['platform'].split('/', 1)
+ treeherder['machine'] = {'platform': machine_platform}
+ treeherder['collection'] = {collection: True}
+
+ groupSymbol, symbol = split_symbol(task_th['symbol'])
+ if groupSymbol != '?':
+ treeherder['groupSymbol'] = groupSymbol
+ if groupSymbol not in GROUP_NAMES:
+ raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
+ treeherder['groupName'] = GROUP_NAMES[groupSymbol]
+ treeherder['symbol'] = symbol
+ treeherder['jobKind'] = task_th['kind']
+ treeherder['tier'] = task_th['tier']
+
+ routes.extend([
+ '{}.v2.{}.{}.{}'.format(TREEHERDER_ROUTE_ROOTS[env],
+ config.params['project'],
+ config.params['head_rev'],
+ config.params['pushlog_id'])
+ for env in task_th['environments']
+ ])
+
+ if 'expires-after' not in task:
+ task['expires-after'] = '28 days' if config.params['project'] == 'try' else '1 year'
+
+ if 'deadline-after' not in task:
+ task['deadline-after'] = '1 day'
+
+ if 'coalesce-name' in task and int(config.params['level']) > 1:
+ key = COALESCE_KEY.format(
+ project=config.params['project'],
+ name=task['coalesce-name'])
+ routes.append('coalesce.v1.' + key)
+
+ task_def = {
+ 'provisionerId': provisioner_id,
+ 'workerType': worker_type,
+ 'routes': routes,
+ 'created': {'relative-datestamp': '0 seconds'},
+ 'deadline': {'relative-datestamp': task['deadline-after']},
+ 'expires': {'relative-datestamp': task['expires-after']},
+ 'scopes': scopes,
+ 'metadata': {
+ 'description': task['description'],
+ 'name': task['label'],
+ 'owner': config.params['owner'],
+ 'source': '{}/file/{}/{}'.format(
+ config.params['head_repository'],
+ config.params['head_rev'],
+ config.path),
+ },
+ 'extra': extra,
+ 'tags': {'createdForUser': config.params['owner']},
+ }
+
+ # add the payload and adjust anything else as required (e.g., scopes)
+ payload_builders[task['worker']['implementation']](config, task, task_def)
+
+ attributes = task.get('attributes', {})
+ attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
+
+ yield {
+ 'label': task['label'],
+ 'task': task_def,
+ 'dependencies': task.get('dependencies', {}),
+ 'attributes': attributes,
+ 'when': task.get('when', {}),
+ }
+
+
+# Check that the v2 route templates match those used by Mozharness. This can
+# go away once Mozharness builds are no longer performed in Buildbot, and the
+# Mozharness code referencing routes.json is deleted.
+def check_v2_routes():
+ with open("testing/mozharness/configs/routes.json", "rb") as f:
+ routes_json = json.load(f)
+
+ # we only deal with the 'routes' key here
+ routes = routes_json['routes']
+
+ # we use different variables than mozharness
+ for mh, tg in [
+ ('{index}', 'index'),
+ ('{build_product}', '{product}'),
+ ('{build_name}-{build_type}', '{job-name-gecko-v2}'),
+ ('{year}.{month}.{day}.{pushdate}', '{build_date_long}')]:
+ routes = [r.replace(mh, tg) for r in routes]
+
+ if sorted(routes) != sorted(V2_ROUTE_TEMPLATES):
+ raise Exception("V2_ROUTE_TEMPLATES does not match Mozharness's routes.json: "
+ "%s vs %s" % (V2_ROUTE_TEMPLATES, routes))
+
+check_v2_routes()
diff --git a/taskcluster/taskgraph/transforms/tests/__init__.py b/taskcluster/taskgraph/transforms/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/__init__.py
diff --git a/taskcluster/taskgraph/transforms/tests/all_kinds.py b/taskcluster/taskgraph/transforms/tests/all_kinds.py
new file mode 100644
index 000000000..f2aa1f841
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/all_kinds.py
@@ -0,0 +1,137 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Changes here apply to all tests, regardless of kind.
+
+This is a great place for:
+
+ * Applying rules based on platform, project, etc. that should span kinds
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.util.treeherder import split_symbol, join_symbol
+from taskgraph.transforms.base import TransformSequence, get_keyed_by
+
+import copy
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_worker_implementation(config, tests):
+ """Set the worker implementation based on the test platform."""
+ for test in tests:
+ if test['test-platform'].startswith('win'):
+ test['worker-implementation'] = 'generic-worker'
+ elif test['test-platform'].startswith('macosx'):
+ test['worker-implementation'] = 'macosx-engine'
+ else:
+ test['worker-implementation'] = 'docker-worker'
+ yield test
+
+
+@transforms.add
+def set_tier(config, tests):
+ """Set the tier based on policy for all test descriptions that do not
+ specify a tier otherwise."""
+ for test in tests:
+ # only override if not set for the test
+ if 'tier' not in test:
+ if test['test-platform'] in ['linux64/debug',
+ 'linux64-asan/opt',
+ 'android-4.3-arm7-api-15/debug',
+ 'android-x86/opt']:
+ test['tier'] = 1
+ else:
+ test['tier'] = 2
+ yield test
+
+
+@transforms.add
+def set_expires_after(config, tests):
+ """Try jobs expire after 2 weeks; everything else lasts 1 year. This helps
+ keep storage costs low."""
+ for test in tests:
+ if 'expires-after' not in test:
+ if config.params['project'] == 'try':
+ test['expires-after'] = "14 days"
+ else:
+ test['expires-after'] = "1 year"
+ yield test
+
+
+@transforms.add
+def set_download_symbols(config, tests):
+ """In general, we download symbols immediately for debug builds, but only
+ on demand for everything else. ASAN builds shouldn't download
+ symbols since they don't product symbol zips see bug 1283879"""
+ for test in tests:
+ if test['test-platform'].split('/')[-1] == 'debug':
+ test['mozharness']['download-symbols'] = True
+ elif test['build-platform'] == 'linux64-asan/opt':
+ if 'download-symbols' in test['mozharness']:
+ del test['mozharness']['download-symbols']
+ else:
+ test['mozharness']['download-symbols'] = 'ondemand'
+ yield test
+
+
+@transforms.add
+def resolve_keyed_by(config, tests):
+ """Resolve fields that can be keyed by platform, etc."""
+ fields = [
+ 'instance-size',
+ 'max-run-time',
+ 'chunks',
+ 'e10s',
+ 'suite',
+ 'run-on-projects',
+ ]
+ for test in tests:
+ for field in fields:
+ test[field] = get_keyed_by(item=test, field=field, item_name=test['test-name'])
+ test['mozharness']['config'] = get_keyed_by(item=test,
+ field='mozharness',
+ subfield='config',
+ item_name=test['test-name'])
+ test['mozharness']['extra-options'] = get_keyed_by(item=test,
+ field='mozharness',
+ subfield='extra-options',
+ item_name=test['test-name'])
+ yield test
+
+
+@transforms.add
+def split_chunks(config, tests):
+ """Based on the 'chunks' key, split tests up into chunks by duplicating
+ them and assigning 'this-chunk' appropriately and updating the treeherder
+ symbol."""
+ for test in tests:
+ if test['chunks'] == 1:
+ test['this-chunk'] = 1
+ yield test
+ continue
+
+ for this_chunk in range(1, test['chunks'] + 1):
+ # copy the test and update with the chunk number
+ chunked = copy.deepcopy(test)
+ chunked['this-chunk'] = this_chunk
+
+ # add the chunk number to the TH symbol
+ group, symbol = split_symbol(chunked['treeherder-symbol'])
+ symbol += str(this_chunk)
+ chunked['treeherder-symbol'] = join_symbol(group, symbol)
+
+ yield chunked
+
+
+@transforms.add
+def set_retry_exit_status(config, tests):
+ """Set the retry exit status to TBPL_RETRY, the value returned by mozharness
+ scripts to indicate a transient failure that should be retried."""
+ for test in tests:
+ test['retry-exit-status'] = 4
+ yield test
diff --git a/taskcluster/taskgraph/transforms/tests/android_test.py b/taskcluster/taskgraph/transforms/tests/android_test.py
new file mode 100644
index 000000000..7c13b16f5
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/android_test.py
@@ -0,0 +1,42 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms are specific to the android-test kind, and apply defaults to
+the test descriptions appropriate to that kind.
+
+Both the input to and output from these transforms must conform to
+`taskgraph.transforms.tests.test:test_schema`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+from taskgraph.transforms.base import TransformSequence
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_defaults(config, tests):
+ for test in tests:
+ # all Android test tasks download internal objects from tooltool
+ test['mozharness']['tooltool-downloads'] = True
+ test['mozharness']['build-artifact-name'] = 'public/build/target.apk'
+ test['mozharness']['actions'] = ['get-secrets']
+ yield test
+
+
+@transforms.add
+def set_treeherder_machine_platform(config, tests):
+ """Set the appropriate task.extra.treeherder.machine.platform"""
+ # The build names for these build platforms have partially evolved over the
+ # years. This is temporary until we can clean up the handling of
+ # platforms.
+ translation = {
+ 'android-api-15/debug': 'android-4-3-armv7-api15/debug',
+ 'android-api-15/opt': 'android-4-3-armv7-api15/opt',
+ 'android-x86/opt': 'android-4-2-x86/opt',
+ }
+ for test in tests:
+ build_platform = test['build-platform']
+ test['treeherder-machine-platform'] = translation.get(build_platform, build_platform)
+ yield test
diff --git a/taskcluster/taskgraph/transforms/tests/desktop_test.py b/taskcluster/taskgraph/transforms/tests/desktop_test.py
new file mode 100644
index 000000000..44a907903
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/desktop_test.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms are specific to the desktop-test kind, and apply defaults to
+the test descriptions appropriate to that kind.
+
+Both the input to and output from these transforms must conform to
+`taskgraph.transforms.tests.test:test_schema`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+from taskgraph.transforms.base import TransformSequence, get_keyed_by
+from taskgraph.util.treeherder import split_symbol, join_symbol
+
+import copy
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def set_defaults(config, tests):
+ for test in tests:
+ build_platform = test['build-platform']
+ if build_platform.startswith('macosx'):
+ target = 'target.dmg'
+ else:
+ target = 'target.tar.bz2'
+ test['mozharness']['build-artifact-name'] = 'public/build/' + target
+ # all desktop tests want to run the bits that require node
+ test['mozharness']['set-moz-node-path'] = True
+ yield test
+
+
+@transforms.add
+def set_treeherder_machine_platform(config, tests):
+ """Set the appropriate task.extra.treeherder.machine.platform"""
+ # Linux64 build platforms for asan and pgo are specified differently to
+ # treeherder. This is temporary until we can clean up the handling of
+ # platforms.
+ translation = {
+ 'linux64-asan/opt': 'linux64/asan',
+ 'linux64-pgo/opt': 'linux64/pgo',
+ 'macosx64/debug': 'osx-10-10/debug',
+ 'macosx64/opt': 'osx-10-10/opt',
+ }
+ for test in tests:
+ build_platform = test['build-platform']
+ test_platform = test['test-platform']
+ test['treeherder-machine-platform'] = translation.get(build_platform, test_platform)
+ yield test
+
+
+@transforms.add
+def set_asan_docker_image(config, tests):
+ """Set the appropriate task.extra.treeherder.docker-image"""
+ # Linux64-asan has many leaks when running mochitest-media jobs on
+ # Ubuntu 16.04; please remove this when bug 1289209 is resolved.
+ for test in tests:
+ if test['suite'] == 'mochitest/mochitest-media' and \
+ test['build-platform'] == 'linux64-asan/opt':
+ test['docker-image'] = {"in-tree": "desktop-test"}
+ yield test
+
+
+@transforms.add
+def split_e10s(config, tests):
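+ """Split tests marked e10s='both' into an e10s and a non-e10s task; the
+ e10s variant gets '-e10s' appended to its test name and treeherder group
+ (a summary of the logic below)."""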
+ for test in tests:
+ e10s = get_keyed_by(item=test, field='e10s',
+ item_name=test['test-name'])
+ test.setdefault('attributes', {})
+ test['e10s'] = False
+ test['attributes']['e10s'] = False
+
+ if e10s == 'both':
+ yield test
+ test = copy.deepcopy(test)
+ e10s = True
+ if e10s:
+ test['test-name'] += '-e10s'
+ test['e10s'] = True
+ test['attributes']['e10s'] = True
+ group, symbol = split_symbol(test['treeherder-symbol'])
+ if group != '?':
+ group += '-e10s'
+ test['treeherder-symbol'] = join_symbol(group, symbol)
+ test['mozharness'].setdefault('extra-options', []).append('--e10s')
+ yield test
+
+
+@transforms.add
+def allow_software_gl_layers(config, tests):
+ for test in tests:
+
+ # since this value defaults to true, but is not applicable on windows,
+ # it's overridden for that platform here.
+ allow = not test['test-platform'].startswith('win') \
+ and get_keyed_by(item=test, field='allow-software-gl-layers',
+ item_name=test['test-name'])
+ if allow:
+ assert test['instance-size'] != 'legacy',\
+ 'Software GL layers on a legacy instance is disallowed (bug 1296086).'
+
+ # This should be set always once bug 1296086 is resolved.
+ test['mozharness'].setdefault('extra-options', [])\
+ .append("--allow-software-gl-layers")
+
+ yield test
+
+
+@transforms.add
+def add_os_groups(config, tests):
+ for test in tests:
+ if test['test-platform'].startswith('win'):
+ groups = get_keyed_by(item=test, field='os-groups', item_name=test['test-name'])
+ if groups:
+ test['os-groups'] = groups
+ yield test
diff --git a/taskcluster/taskgraph/transforms/tests/make_task_description.py b/taskcluster/taskgraph/transforms/tests/make_task_description.py
new file mode 100644
index 000000000..fc3f94893
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/make_task_description.py
@@ -0,0 +1,445 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+These transforms construct a task description to run the given test, based on a
+test description. The implementation here is shared among all test kinds, but
+contains specific support for how we run tests in Gecko (via mozharness,
+invoked in particular ways).
+
+This is a good place to translate a test-description option such as
+`single-core: true` to the implementation of that option in a task description
+(worker options, mozharness commandline, environment variables, etc.)
+
+The test description should be fully formed by the time it reaches these
+transforms, and these transforms should not embody any specific knowledge about
+what should run where. This is the wrong place for special-casing platforms,
+for example - use `all_tests.py` instead.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+from taskgraph.transforms.job.common import (
+ docker_worker_support_vcs_checkout,
+)
+
+import logging
+import os.path
+
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+WORKER_TYPE = {
+ # default worker types keyed by instance-size
+ 'large': 'aws-provisioner-v1/gecko-t-linux-large',
+ 'xlarge': 'aws-provisioner-v1/gecko-t-linux-xlarge',
+ 'legacy': 'aws-provisioner-v1/gecko-t-linux-medium',
+ 'default': 'aws-provisioner-v1/gecko-t-linux-large',
+ # windows worker types keyed by test-platform
+ 'windows7-32-vm': 'aws-provisioner-v1/gecko-t-win7-32',
+ 'windows7-32': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
+ 'windows10-64-vm': 'aws-provisioner-v1/gecko-t-win10-64',
+ 'windows10-64': 'aws-provisioner-v1/gecko-t-win10-64-gpu'
+}
+
+ARTIFACTS = [
+ # (artifact name prefix, in-image path)
+ ("public/logs/", "build/upload/logs/"),
+ ("public/test", "artifacts/"),
+ ("public/test_info/", "build/blobber_upload_dir/"),
+]
+
+logger = logging.getLogger(__name__)
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def make_task_description(config, tests):
+ """Convert *test* descriptions to *task* descriptions (input to
+ taskgraph.transforms.task)"""
+
+ for test in tests:
+ label = '{}-{}-{}'.format(config.kind, test['test-platform'], test['test-name'])
+ if test['chunks'] > 1:
+ label += '-{}'.format(test['this-chunk'])
+
+ build_label = test['build-label']
+
+ unittest_try_name = test.get('unittest-try-name', test['test-name'])
+
+ attr_build_platform, attr_build_type = test['build-platform'].split('/', 1)
+
+ suite = test['suite']
+ if '/' in suite:
+ suite, flavor = suite.split('/', 1)
+ else:
+ flavor = suite
+
+ attributes = test.get('attributes', {})
+ attributes.update({
+ 'build_platform': attr_build_platform,
+ 'build_type': attr_build_type,
+ # only keep the first portion of the test platform
+ 'test_platform': test['test-platform'].split('/')[0],
+ 'test_chunk': str(test['this-chunk']),
+ 'unittest_suite': suite,
+ 'unittest_flavor': flavor,
+ 'unittest_try_name': unittest_try_name,
+ })
+
+ taskdesc = {}
+ taskdesc['label'] = label
+ taskdesc['description'] = test['description']
+ taskdesc['attributes'] = attributes
+ taskdesc['dependencies'] = {'build': build_label}
+ taskdesc['deadline-after'] = '1 day'
+ taskdesc['expires-after'] = test['expires-after']
+ taskdesc['routes'] = []
+ taskdesc['run-on-projects'] = test.get('run-on-projects', ['all'])
+ taskdesc['scopes'] = []
+ taskdesc['extra'] = {
+ 'chunks': {
+ 'current': test['this-chunk'],
+ 'total': test['chunks'],
+ },
+ 'suite': {
+ 'name': suite,
+ 'flavor': flavor,
+ },
+ }
+ taskdesc['treeherder'] = {
+ 'symbol': test['treeherder-symbol'],
+ 'kind': 'test',
+ 'tier': test['tier'],
+ 'platform': test.get('treeherder-machine-platform', test['build-platform']),
+ }
+
+ # the remainder (the worker-type and worker) differs depending on the
+ # worker implementation
+ worker_setup_functions[test['worker-implementation']](config, test, taskdesc)
+
+ # yield only the task description, discarding the test description
+ yield taskdesc
+
+
+worker_setup_functions = {}
+
+
+def worker_setup_function(name):
+ def wrap(func):
+ worker_setup_functions[name] = func
+ return func
+ return wrap
+
+
+@worker_setup_function("docker-engine")
+@worker_setup_function("docker-worker")
+def docker_worker_setup(config, test, taskdesc):
+
+ artifacts = [
+ # (artifact name prefix, in-image path)
+ ("public/logs/", "/home/worker/workspace/build/upload/logs/"),
+ ("public/test", "/home/worker/artifacts/"),
+ ("public/test_info/", "/home/worker/workspace/build/blobber_upload_dir/"),
+ ]
+ mozharness = test['mozharness']
+
+ installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
+ test_packages_url = ARTIFACT_URL.format('<build>',
+ 'public/build/target.test_packages.json')
+ mozharness_url = ARTIFACT_URL.format('<build>',
+ 'public/build/mozharness.zip')
+
+ taskdesc['worker-type'] = WORKER_TYPE[test['instance-size']]
+
+ worker = taskdesc['worker'] = {}
+ worker['implementation'] = test['worker-implementation']
+ worker['docker-image'] = test['docker-image']
+
+ worker['allow-ptrace'] = True # required for all tests, for crashreporter
+ worker['relengapi-proxy'] = False # but maybe enabled for tooltool below
+ worker['loopback-video'] = test['loopback-video']
+ worker['loopback-audio'] = test['loopback-audio']
+ worker['max-run-time'] = test['max-run-time']
+ worker['retry-exit-status'] = test['retry-exit-status']
+
+ worker['artifacts'] = [{
+ 'name': prefix,
+ 'path': os.path.join('/home/worker/workspace', path),
+ 'type': 'directory',
+ } for (prefix, path) in artifacts]
+
+ worker['caches'] = [{
+ 'type': 'persistent',
+ 'name': 'level-{}-{}-test-workspace'.format(
+ config.params['level'], config.params['project']),
+ 'mount-point': "/home/worker/workspace",
+ }]
+
+ env = worker['env'] = {
+ 'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
+ 'MOZHARNESS_SCRIPT': mozharness['script'],
+ 'MOZILLA_BUILD_URL': {'task-reference': installer_url},
+ 'NEED_PULSEAUDIO': 'true',
+ 'NEED_WINDOW_MANAGER': 'true',
+ }
+
+ if mozharness['set-moz-node-path']:
+ env['MOZ_NODE_PATH'] = '/usr/local/bin/node'
+
+ if 'actions' in mozharness:
+ env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])
+
+ if config.params['project'] == 'try':
+ env['TRY_COMMIT_MSG'] = config.params['message']
+
+ # handle some of the mozharness-specific options
+
+ if mozharness['tooltool-downloads']:
+ worker['relengapi-proxy'] = True
+ worker['caches'].append({
+ 'type': 'persistent',
+ 'name': 'tooltool-cache',
+ 'mount-point': '/home/worker/tooltool-cache',
+ })
+ taskdesc['scopes'].extend([
+ 'docker-worker:relengapi-proxy:tooltool.download.internal',
+ 'docker-worker:relengapi-proxy:tooltool.download.public',
+ ])
+
+ # assemble the command line
+ command = [
+ '/home/worker/bin/run-task',
+ # The workspace cache/volume is default owned by root:root.
+ '--chown', '/home/worker/workspace',
+ ]
+
+ # Support vcs checkouts regardless of whether the task runs from
+ # source or not in case it is needed on an interactive loaner.
+ docker_worker_support_vcs_checkout(config, test, taskdesc)
+
+ # If we have a source checkout, run mozharness from it instead of
+ # downloading a zip file with the same content.
+ if test['checkout']:
+ command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
+ env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
+ else:
+ env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
+
+ command.extend([
+ '--',
+ '/home/worker/bin/test-linux.sh',
+ ])
+
+ if mozharness.get('no-read-buildbot-config'):
+ command.append("--no-read-buildbot-config")
+ command.extend([
+ {"task-reference": "--installer-url=" + installer_url},
+ {"task-reference": "--test-packages-url=" + test_packages_url},
+ ])
+ command.extend(mozharness.get('extra-options', []))
+
+ # TODO: remove the need for run['chunked']
+ if mozharness.get('chunked') or test['chunks'] > 1:
+ # Implement mozharness['chunking-args'], modifying command in place
+ if mozharness['chunking-args'] == 'this-chunk':
+ command.append('--total-chunk={}'.format(test['chunks']))
+ command.append('--this-chunk={}'.format(test['this-chunk']))
+ elif mozharness['chunking-args'] == 'test-suite-suffix':
+ suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
+ for i, c in enumerate(command):
+ if isinstance(c, basestring) and c.startswith('--test-suite'):
+ command[i] += suffix
+
+ if 'download-symbols' in mozharness:
+ download_symbols = mozharness['download-symbols']
+ download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
+ command.append('--download-symbols=' + download_symbols)
+
+ worker['command'] = command
+
+
+def normpath(path):
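+ # e.g. (illustrative) normpath('scripts/desktop_unittest.py') -> 'scripts\\desktop_unittest.py'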
+ return path.replace('/', '\\')
+
+
+def get_firefox_version():
+ with open('browser/config/version.txt', 'r') as f:
+ return f.readline().strip()
+
+
+@worker_setup_function('generic-worker')
+def generic_worker_setup(config, test, taskdesc):
+ artifacts = [
+ {
+ 'path': 'public\\logs\\localconfig.json',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_critical.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_error.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_fatal.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_info.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_raw.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\logs\\log_warning.log',
+ 'type': 'file'
+ },
+ {
+ 'path': 'public\\test_info',
+ 'type': 'directory'
+ }
+ ]
+ mozharness = test['mozharness']
+
+ build_platform = taskdesc['attributes']['build_platform']
+ test_platform = test['test-platform'].split('/')[0]
+
+ target = 'firefox-{}.en-US.{}'.format(get_firefox_version(), build_platform)
+
+ installer_url = ARTIFACT_URL.format(
+ '<build>', 'public/build/{}.zip'.format(target))
+ test_packages_url = ARTIFACT_URL.format(
+ '<build>', 'public/build/{}.test_packages.json'.format(target))
+ mozharness_url = ARTIFACT_URL.format(
+ '<build>', 'public/build/mozharness.zip')
+
+ taskdesc['worker-type'] = WORKER_TYPE[test_platform]
+
+ taskdesc['scopes'].extend(
+ ['generic-worker:os-group:{}'.format(group) for group in test['os-groups']])
+
+ worker = taskdesc['worker'] = {}
+ worker['os-groups'] = test['os-groups']
+ worker['implementation'] = test['worker-implementation']
+ worker['max-run-time'] = test['max-run-time']
+ worker['artifacts'] = artifacts
+
+ env = worker['env'] = {
+ # Bug 1306989
+ 'APPDATA': '%cd%\\AppData\\Roaming',
+ 'LOCALAPPDATA': '%cd%\\AppData\\Local',
+ 'TEMP': '%cd%\\AppData\\Local\\Temp',
+ 'TMP': '%cd%\\AppData\\Local\\Temp',
+ 'USERPROFILE': '%cd%',
+ }
+
+ # assemble the command line
+ mh_command = [
+ 'c:\\mozilla-build\\python\\python.exe',
+ '-u',
+ 'mozharness\\scripts\\' + normpath(mozharness['script'])
+ ]
+ for mh_config in mozharness['config']:
+ mh_command.extend(['--cfg', 'mozharness\\configs\\' + normpath(mh_config)])
+ mh_command.extend(mozharness.get('extra-options', []))
+ if mozharness.get('no-read-buildbot-config'):
+ mh_command.append('--no-read-buildbot-config')
+ mh_command.extend(['--installer-url', installer_url])
+ mh_command.extend(['--test-packages-url', test_packages_url])
+ if mozharness.get('download-symbols'):
+ if isinstance(mozharness['download-symbols'], basestring):
+ mh_command.extend(['--download-symbols', mozharness['download-symbols']])
+ else:
+ mh_command.extend(['--download-symbols', 'true'])
+
+ # TODO: remove the need for run['chunked']
+ if mozharness.get('chunked') or test['chunks'] > 1:
+ # Implement mozharness['chunking-args'], modifying command in place
+ if mozharness['chunking-args'] == 'this-chunk':
+ mh_command.append('--total-chunk={}'.format(test['chunks']))
+ mh_command.append('--this-chunk={}'.format(test['this-chunk']))
+ elif mozharness['chunking-args'] == 'test-suite-suffix':
+ suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
+ for i, c in enumerate(mh_command):
+ if isinstance(c, basestring) and c.startswith('--test-suite'):
+ mh_command[i] += suffix
+
+ worker['command'] = [
+ 'mkdir {} {}'.format(env['APPDATA'], env['TMP']),
+ {'task-reference': 'c:\\mozilla-build\\wget\\wget.exe {}'.format(mozharness_url)},
+ 'c:\\mozilla-build\\info-zip\\unzip.exe mozharness.zip',
+ {'task-reference': ' '.join(mh_command)},
+ 'xcopy build\\blobber_upload_dir public\\test_info /e /i',
+ 'copy /y logs\\*.* public\\logs\\'
+ ]
+
+
+@worker_setup_function("macosx-engine")
+def macosx_engine_setup(config, test, taskdesc):
+ mozharness = test['mozharness']
+
+ installer_url = ARTIFACT_URL.format('<build>', mozharness['build-artifact-name'])
+ test_packages_url = ARTIFACT_URL.format('<build>',
+ 'public/build/target.test_packages.json')
+ mozharness_url = ARTIFACT_URL.format('<build>',
+ 'public/build/mozharness.zip')
+
+ # for now we have only 10.10 machines
+ taskdesc['worker-type'] = 'tc-worker-provisioner/gecko-t-osx-10-10'
+
+ worker = taskdesc['worker'] = {}
+ worker['implementation'] = test['worker-implementation']
+
+ worker['artifacts'] = [{
+ 'name': prefix.rstrip('/'),
+ 'path': path.rstrip('/'),
+ 'type': 'directory',
+ } for (prefix, path) in ARTIFACTS]
+
+ worker['env'] = {
+ 'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+ 'GECKO_HEAD_REV': config.params['head_rev'],
+ 'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
+ 'MOZHARNESS_SCRIPT': mozharness['script'],
+ 'MOZHARNESS_URL': {'task-reference': mozharness_url},
+ 'MOZILLA_BUILD_URL': {'task-reference': installer_url},
+ }
+
+ # assemble the command line
+
+ worker['link'] = '{}/raw-file/{}/taskcluster/scripts/tester/test-macosx.sh'.format(
+ config.params['head_repository'], config.params['head_rev']
+ )
+
+ command = worker['command'] = ["./test-macosx.sh"]
+ if mozharness.get('no-read-buildbot-config'):
+ command.append("--no-read-buildbot-config")
+ command.extend([
+ {"task-reference": "--installer-url=" + installer_url},
+ {"task-reference": "--test-packages-url=" + test_packages_url},
+ ])
+ if mozharness.get('include-blob-upload-branch'):
+ command.append('--blob-upload-branch=' + config.params['project'])
+ command.extend(mozharness.get('extra-options', []))
+
+ # TODO: remove the need for run['chunked']
+ if mozharness.get('chunked') or test['chunks'] > 1:
+ # Implement mozharness['chunking-args'], modifying command in place
+ if mozharness['chunking-args'] == 'this-chunk':
+ command.append('--total-chunk={}'.format(test['chunks']))
+ command.append('--this-chunk={}'.format(test['this-chunk']))
+ elif mozharness['chunking-args'] == 'test-suite-suffix':
+ suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
+ for i, c in enumerate(command):
+ if isinstance(c, basestring) and c.startswith('--test-suite'):
+ command[i] += suffix
+
+ if 'download-symbols' in mozharness:
+ download_symbols = mozharness['download-symbols']
+ download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
+ command.append('--download-symbols=' + download_symbols)
diff --git a/taskcluster/taskgraph/transforms/tests/test_description.py b/taskcluster/taskgraph/transforms/tests/test_description.py
new file mode 100644
index 000000000..1365919fe
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/tests/test_description.py
@@ -0,0 +1,235 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+This file defines the schema for tests -- the things in `tests.yml`. It should
+be run both before and after the kind-specific transforms, to ensure that the
+transforms do not generate invalid tests.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import validate_schema
+from voluptuous import (
+ Any,
+ Optional,
+ Required,
+ Schema,
+)
+
+
+# Schema for a test description
+#
+# *****WARNING*****
+#
+# This is a great place for baffling cruft to accumulate, and that makes
+# everyone move more slowly. Be considerate of your fellow hackers!
+# See the warnings in taskcluster/docs/how-tos.rst
+#
+# *****WARNING*****
+test_description_schema = Schema({
+ # description of the suite, for the task metadata
+ 'description': basestring,
+
+ # test suite name, or <suite>/<flavor>
+ Required('suite'): Any(
+ basestring,
+ {'by-test-platform': {basestring: basestring}},
+ ),
+
+ # the name by which this test suite is addressed in try syntax; defaults to
+ # the test-name
+ Optional('unittest-try-name'): basestring,
+
+ # the symbol, or group(symbol), under which this task should appear in
+ # treeherder.
+ 'treeherder-symbol': basestring,
+
+ # the value to place in task.extra.treeherder.machine.platform; ideally
+ # this is the same as build-platform, and that is the default, but in
+ # practice it's not always a match.
+ Optional('treeherder-machine-platform'): basestring,
+
+ # attributes to appear in the resulting task (later transforms will add the
+ # common attributes)
+ Optional('attributes'): {basestring: object},
+
+ # The `run_on_projects` attribute, defaulting to "all". This dictates the
+ # projects on which this task should be included in the target task set.
+ # See the attributes documentation for details.
+ Optional('run-on-projects', default=['all']): Any(
+ [basestring],
+ {'by-test-platform': {basestring: [basestring]}},
+ ),
+
+ # the sheriffing tier for this task (default: set based on test platform)
+ Optional('tier'): int,
+
+ # number of chunks to create for this task. This can be keyed by test
+ # platform by passing a dictionary in the `by-test-platform` key. If the
+ # test platform is not found, the key 'default' will be tried.
+ Required('chunks', default=1): Any(
+ int,
+ {'by-test-platform': {basestring: int}},
+ ),
+
+ # the time (with unit) after which this task is deleted; default depends on
+ # the branch (see below)
+ Optional('expires-after'): basestring,
+
+ # Whether to run this task with e10s (desktop-test only). If false, run
+ # without e10s; if true, run with e10s; if 'both', run one task with and
+ # one task without e10s. E10s tasks have "-e10s" appended to the test name
+ # and treeherder group.
+ Required('e10s', default='both'): Any(
+ bool, 'both',
+ {'by-test-platform': {basestring: Any(bool, 'both')}},
+ ),
+
+ # The EC2 instance size to run these tests on.
+ Required('instance-size', default='default'): Any(
+ Any('default', 'large', 'xlarge', 'legacy'),
+ {'by-test-platform': {basestring: Any('default', 'large', 'xlarge', 'legacy')}},
+ ),
+
+ # Whether the task requires loopback audio or video (whatever that may mean
+ # on the platform)
+ Required('loopback-audio', default=False): bool,
+ Required('loopback-video', default=False): bool,
+
+ # Whether the test can run using a software GL implementation on Linux
+ # using the GL compositor. May not be used with "legacy" sized instances
+ # due to poor LLVMPipe performance (bug 1296086).
+ Optional('allow-software-gl-layers', default=True): bool,
+
+ # The worker implementation for this test, as dictated by policy and by the
+ # test platform.
+ Optional('worker-implementation'): Any(
+ 'docker-worker',
+ 'macosx-engine',
+ 'generic-worker',
+ # coming soon:
+ 'docker-engine',
+ 'buildbot-bridge',
+ ),
+
+ # For tasks that will run in docker-worker or docker-engine, this is the
+ # name of the docker image or in-tree docker image to run the task in. If
+ # in-tree, then a dependency will be created automatically. This is
+ # generally `desktop-test`, or an image that acts an awful lot like it.
+ Required('docker-image', default={'in-tree': 'desktop-test'}): Any(
+ # a raw Docker image path (repo/image:tag)
+ basestring,
+ # an in-tree generated docker image (from `testing/docker/<name>`)
+ {'in-tree': basestring}
+ ),
+
+ # seconds of runtime after which the task will be killed. Like 'chunks',
+ # this can be keyed by test platform.
+ Required('max-run-time', default=3600): Any(
+ int,
+ {'by-test-platform': {basestring: int}},
+ ),
+
+ # the exit status code that indicates the task should be retried
+ Optional('retry-exit-status'): int,
+
+ # Whether to perform a gecko checkout.
+ Required('checkout', default=False): bool,
+
+ # What to run
+ Required('mozharness'): Any({
+ # the mozharness script used to run this task
+ Required('script'): basestring,
+
+ # the config files required for the task
+ Required('config'): Any(
+ [basestring],
+ {'by-test-platform': {basestring: [basestring]}},
+ ),
+
+ # any additional actions to pass to the mozharness command
+ Optional('actions'): [basestring],
+
+ # additional command-line options for mozharness, beyond those
+ # automatically added
+ Required('extra-options', default=[]): Any(
+ [basestring],
+ {'by-test-platform': {basestring: [basestring]}},
+ ),
+
+ # the artifact name (including path) to test on the build task; this is
+ # generally set in a per-kind transformation
+ Optional('build-artifact-name'): basestring,
+
+ # If true, tooltool downloads will be enabled via relengAPIProxy.
+ Required('tooltool-downloads', default=False): bool,
+
+ # This mozharness script also runs in Buildbot and tries to read a
+ # buildbot config file, so tell it not to do so in TaskCluster
+ Required('no-read-buildbot-config', default=False): bool,
+
+ # Add --blob-upload-branch=<project> mozharness parameter
+ Optional('include-blob-upload-branch'): bool,
+
+ # The setting for --download-symbols (if omitted, the option will not
+ # be passed to mozharness)
+ Optional('download-symbols'): Any(True, 'ondemand'),
+
+ # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
+ # environment. This is more than just a helpful path setting -- it
+ # causes xpcshell tests to start additional servers, and runs
+ # additional tests.
+ Required('set-moz-node-path', default=False): bool,
+
+ # If true, include chunking information in the command even if the number
+ # of chunks is 1
+ Required('chunked', default=False): bool,
+
+ # The chunking argument format to use
+ Required('chunking-args', default='this-chunk'): Any(
+ # Use the usual --this-chunk/--total-chunk arguments
+ 'this-chunk',
+ # Use --test-suite=<suite>-<chunk-suffix>; see chunk-suffix, below
+ 'test-suite-suffix',
+ ),
+
+ # the string to append to the `--test-suite` argument when
+ # chunking-args = test-suite-suffix; "<CHUNK>" in this string will
+ # be replaced with the chunk number.
+ Optional('chunk-suffix'): basestring,
+ }),
+
+ # The current chunk; this is filled in by `all_kinds.py`
+ Optional('this-chunk'): int,
+
+ # os user groups for test task workers; required scopes, will be
+ # added automatically
+ Optional('os-groups', default=[]): Any(
+ [basestring],
+ # todo: create a dedicated elevated worker group and name here
+ {'by-test-platform': {basestring: [basestring]}},
+ ),
+
+ # -- values supplied by the task-generation infrastructure
+
+ # the platform of the build this task is testing
+ 'build-platform': basestring,
+
+ # the label of the build task generating the materials to test
+ 'build-label': basestring,
+
+ # the platform on which the tests will run
+ 'test-platform': basestring,
+
+ # the name of the test (the key in tests.yml)
+ 'test-name': basestring,
+
+}, required=True)
+
+
+# TODO: can we have validate and validate_full for before and after?
+def validate(config, tests):
+ for test in tests:
+ yield validate_schema(test_description_schema, test,
+ "In test {!r}:".format(test['test-name']))
diff --git a/taskcluster/taskgraph/transforms/upload_symbols.py b/taskcluster/taskgraph/transforms/upload_symbols.py
new file mode 100644
index 000000000..9b4884a97
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/upload_symbols.py
@@ -0,0 +1,36 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Transform the upload-symbols task description template,
+ taskcluster/ci/upload-symbols/job-template.yml
+into an actual task description.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.base import TransformSequence
+
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def fill_template(config, tasks):
+ for task in tasks:
+ # Fill out the dynamic fields in the task description
+ task['label'] = task['build-label'] + '-upload-symbols'
+ task['dependencies'] = {'build': task['build-label']}
+ task['worker']['env']['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
+ task['worker']['env']['GECKO_HEAD_REV'] = config.params['head_rev']
+
+ build_platform, build_type = task['build-platform'].split('/')
+ attributes = task.setdefault('attributes', {})
+ attributes['build_platform'] = build_platform
+ attributes['build_type'] = build_type
+
+ # clear out the stuff that's not part of a task description
+ del task['build-label']
+ del task['build-platform']
+
+ yield task
diff --git a/taskcluster/taskgraph/try_option_syntax.py b/taskcluster/taskgraph/try_option_syntax.py
new file mode 100644
index 000000000..b5988db98
--- /dev/null
+++ b/taskcluster/taskgraph/try_option_syntax.py
@@ -0,0 +1,559 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import argparse
+import copy
+import logging
+import re
+import shlex
+
+logger = logging.getLogger(__name__)
+
+TRY_DELIMITER = 'try:'
+
+# The build type aliases are very cryptic and only used in try flags; these
+# are mappings from the single-char alias to a longer, more recognizable form.
+BUILD_TYPE_ALIASES = {
+ 'o': 'opt',
+ 'd': 'debug'
+}
+
+# consider anything in this whitelist of kinds to be governed by -b/-p
+BUILD_KINDS = set([
+ 'build',
+ 'artifact-build',
+ 'hazard',
+ 'l10n',
+ 'upload-symbols',
+ 'valgrind',
+ 'static-analysis',
+ 'spidermonkey',
+])
+
+# anything in this list is governed by -j
+JOB_KINDS = set([
+ 'source-check',
+ 'toolchain',
+ 'marionette-harness',
+ 'android-stuff',
+])
+
+
+# mapping from shortcut name (usable with -u) to a boolean function identifying
+# matching test names
+def alias_prefix(prefix):
+ return lambda name: name.startswith(prefix)
+
+
+def alias_contains(infix):
+ return lambda name: infix in name
+
+
+def alias_matches(pattern):
+ pattern = re.compile(pattern)
+ return lambda name: pattern.match(name)
+
+UNITTEST_ALIASES = {
+ # Aliases specify shorthands that can be used in try syntax. The shorthand
+ # is the dictionary key, with the value representing a pattern for matching
+ # unittest_try_names.
+ #
+ # Note that alias expansion is performed in the absence of any chunk
+ # suffixes.  For example, an alias mapping "foo" to "foobar" would replace
+ # "foo-7" with "foobar-7".  Note that a few aliases allowed chunks to be specified
+ # without a leading `-`, for example 'mochitest-dt1'. That's no longer
+ # supported.
+ 'cppunit': alias_prefix('cppunit'),
+ 'crashtest': alias_prefix('crashtest'),
+ 'crashtest-e10s': alias_prefix('crashtest-e10s'),
+ 'e10s': alias_contains('e10s'),
+ 'external-media-tests': alias_prefix('external-media-tests'),
+ 'firefox-ui-functional': alias_prefix('firefox-ui-functional'),
+ 'firefox-ui-functional-e10s': alias_prefix('firefox-ui-functional-e10s'),
+ 'gaia-js-integration': alias_contains('gaia-js-integration'),
+ 'gtest': alias_prefix('gtest'),
+ 'jittest': alias_prefix('jittest'),
+ 'jittests': alias_prefix('jittest'),
+ 'jsreftest': alias_prefix('jsreftest'),
+ 'jsreftest-e10s': alias_prefix('jsreftest-e10s'),
+ 'marionette': alias_prefix('marionette'),
+ 'marionette-e10s': alias_prefix('marionette-e10s'),
+ 'mochitest': alias_prefix('mochitest'),
+ 'mochitests': alias_prefix('mochitest'),
+ 'mochitest-e10s': alias_prefix('mochitest-e10s'),
+ 'mochitests-e10s': alias_prefix('mochitest-e10s'),
+ 'mochitest-debug': alias_prefix('mochitest-debug-'),
+ 'mochitest-a11y': alias_contains('mochitest-a11y'),
+ 'mochitest-bc': alias_prefix('mochitest-browser-chrome'),
+ 'mochitest-e10s-bc': alias_prefix('mochitest-e10s-browser-chrome'),
+ 'mochitest-browser-chrome': alias_prefix('mochitest-browser-chrome'),
+ 'mochitest-e10s-browser-chrome': alias_prefix('mochitest-e10s-browser-chrome'),
+ 'mochitest-chrome': alias_contains('mochitest-chrome'),
+ 'mochitest-dt': alias_prefix('mochitest-devtools-chrome'),
+ 'mochitest-e10s-dt': alias_prefix('mochitest-e10s-devtools-chrome'),
+ 'mochitest-gl': alias_prefix('mochitest-webgl'),
+ 'mochitest-gl-e10s': alias_prefix('mochitest-webgl-e10s'),
+ 'mochitest-gpu': alias_prefix('mochitest-gpu'),
+ 'mochitest-gpu-e10s': alias_prefix('mochitest-gpu-e10s'),
+ 'mochitest-clipboard': alias_prefix('mochitest-clipboard'),
+ 'mochitest-clipboard-e10s': alias_prefix('mochitest-clipboard-e10s'),
+ 'mochitest-jetpack': alias_prefix('mochitest-jetpack'),
+ 'mochitest-media': alias_prefix('mochitest-media'),
+ 'mochitest-media-e10s': alias_prefix('mochitest-media-e10s'),
+ 'mochitest-vg': alias_prefix('mochitest-valgrind'),
+ 'reftest': alias_matches(r'^(plain-)?reftest.*$'),
+ 'reftest-no-accel': alias_matches(r'^(plain-)?reftest-no-accel.*$'),
+ 'reftests': alias_matches(r'^(plain-)?reftest.*$'),
+ 'reftests-e10s': alias_matches(r'^(plain-)?reftest-e10s.*$'),
+ 'robocop': alias_prefix('robocop'),
+ 'web-platform-test': alias_prefix('web-platform-tests'),
+ 'web-platform-tests': alias_prefix('web-platform-tests'),
+ 'web-platform-tests-e10s': alias_prefix('web-platform-tests-e10s'),
+ 'web-platform-tests-reftests': alias_prefix('web-platform-tests-reftests'),
+ 'web-platform-tests-reftests-e10s': alias_prefix('web-platform-tests-reftests-e10s'),
+ 'xpcshell': alias_prefix('xpcshell'),
+}
+
+# unittest platforms can be specified by substring of the "pretty name", which
+# is basically the old Buildbot builder name. This dict has {pretty name,
+# [test_platforms]} translations, This includes only the most commonly-used
+# substrings. This is intended only for backward-compatibility. New test
+# platforms should have their `test_platform` spelled out fully in try syntax.
+UNITTEST_PLATFORM_PRETTY_NAMES = {
+ 'Ubuntu': ['linux', 'linux64', 'linux64-asan'],
+ 'x64': ['linux64', 'linux64-asan'],
+ 'Android 4.3': ['android-4.3-arm7-api-15'],
+ # other commonly-used substrings for platforms not yet supported with
+ # in-tree taskgraphs:
+ # '10.10': [..TODO..],
+ # '10.10.5': [..TODO..],
+ # '10.6': [..TODO..],
+ # '10.8': [..TODO..],
+ # 'Android 2.3 API9': [..TODO..],
+ # 'Windows 7': [..TODO..],
+ # 'Windows 7 VM': [..TODO..],
+ # 'Windows 8': [..TODO..],
+ # 'Windows XP': [..TODO..],
+ # 'win32': [..TODO..],
+ # 'win64': [..TODO..],
+}
+
+# We have a few platforms for which we want to do some "extra" builds, or at
+# least build-ish things. Sort of. Anyway, these other things are implemented
+# as different "platforms". These do *not* automatically ride along with "-p
+# all"
+RIDEALONG_BUILDS = {
+ 'android-api-15': [
+ 'android-api-15-l10n',
+ ],
+ 'linux': [
+ 'linux-l10n',
+ ],
+ 'linux64': [
+ 'linux64-l10n',
+ 'sm-plain',
+ 'sm-nonunified',
+ 'sm-arm-sim',
+ 'sm-arm64-sim',
+ 'sm-compacting',
+ 'sm-rootanalysis',
+ 'sm-package',
+ 'sm-tsan',
+ 'sm-asan',
+ 'sm-mozjs-sys',
+ 'sm-msan',
+ ],
+}
+
+TEST_CHUNK_SUFFIX = re.compile('(.*)-([0-9]+)$')
+
+
+class TryOptionSyntax(object):
+
+ def __init__(self, message, full_task_graph):
+ """
+ Parse a "try syntax" formatted commit message. This is the old "-b do -p
+ win32 -u all" format. Aliases are applied to map short names to full
+ names.
+
+ The resulting object has attributes:
+
+ - build_types: a list containing zero or more of 'opt' and 'debug'
+ - platforms: a list of selected platform names, or None for all
+ - unittests: a list of tests, of the form given below, or None for all
+ - jobs: a list of requested job names, or None for all
+ - trigger_tests: the number of times tests should be triggered (--rebuild)
+ - interactive: true if --interactive
+ - notifications: either None if no notifications or one of 'all' or 'failure'
+
+ Note that -t is currently completely ignored.
+
+ The unittests and talos lists contain dictionaries of the form:
+
+ {
+ 'test': '<suite name>',
+ 'platforms': [..platform names..], # to limit to only certain platforms
+ 'only_chunks': set([..chunk numbers..]), # to limit only to certain chunks
+ }
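+
+ For example (an illustrative sketch), the message
+ "try: -b d -p linux64 -u mochitest-1" yields build_types ['debug'],
+ platforms ['linux64'] plus its ridealong builds, and unittests
+ [{'test': 'mochitest', 'only_chunks': set(['1'])}].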
+ """
+ self.jobs = []
+ self.build_types = []
+ self.platforms = []
+ self.unittests = []
+ self.talos = []
+ self.trigger_tests = 0
+ self.interactive = False
+ self.notifications = None
+
+ # shlex used to ensure we split correctly when giving values to argparse.
+ parts = shlex.split(self.escape_whitespace_in_brackets(message))
+ try_idx = None
+ for idx, part in enumerate(parts):
+ if part == TRY_DELIMITER:
+ try_idx = idx
+ break
+
+ if try_idx is None:
+ return
+
+ # Argument parser based on try flags
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-b', '--build', dest='build_types')
+ parser.add_argument('-p', '--platform', nargs='?',
+ dest='platforms', const='all', default='all')
+ parser.add_argument('-u', '--unittests', nargs='?',
+ dest='unittests', const='all', default='all')
+ parser.add_argument('-t', '--talos', nargs='?', dest='talos', const='all', default='all')
+ parser.add_argument('-i', '--interactive',
+ dest='interactive', action='store_true', default=False)
+ parser.add_argument('-e', '--all-emails',
+ dest='notifications', action='store_const', const='all')
+ parser.add_argument('-f', '--failure-emails',
+ dest='notifications', action='store_const', const='failure')
+ parser.add_argument('-j', '--job', dest='jobs', action='append')
+ # In order to run test jobs multiple times
+ parser.add_argument('--rebuild', dest='trigger_tests', type=int, default=1)
+ args, _ = parser.parse_known_args(parts[try_idx:])
+
+ self.jobs = self.parse_jobs(args.jobs)
+ self.build_types = self.parse_build_types(args.build_types)
+ self.platforms = self.parse_platforms(args.platforms)
+ self.unittests = self.parse_test_option(
+ "unittest_try_name", args.unittests, full_task_graph)
+ self.talos = self.parse_test_option("talos_try_name", args.talos, full_task_graph)
+ self.trigger_tests = args.trigger_tests
+ self.interactive = args.interactive
+ self.notifications = args.notifications
+
+ def parse_jobs(self, jobs_arg):
+ if not jobs_arg or jobs_arg == ['all']:
+ return None
+ expanded = []
+ for job in jobs_arg:
+ expanded.extend(j.strip() for j in job.split(','))
+ return expanded
+
+ def parse_build_types(self, build_types_arg):
+ if build_types_arg is None:
+ build_types_arg = []
+ build_types = filter(None, [BUILD_TYPE_ALIASES.get(build_type) for
+ build_type in build_types_arg])
+ return build_types
+
+ def parse_platforms(self, platform_arg):
+ if platform_arg == 'all':
+ return None
+
+ results = []
+ for build in platform_arg.split(','):
+ results.append(build)
+ if build in RIDEALONG_BUILDS:
+ results.extend(RIDEALONG_BUILDS[build])
+ logger.info("platform %s triggers ridealong builds %s" %
+ (build, ', '.join(RIDEALONG_BUILDS[build])))
+
+ return results
+
+ def parse_test_option(self, attr_name, test_arg, full_task_graph):
+ '''
+
+ Parse a unittest (-u) or talos (-t) option, in the context of a full
+ task graph containing available `unittest_try_name` or `talos_try_name`
+ attributes. There are three cases:
+
+ - test_arg == 'none' (meaning an empty list)
+ - test_arg == 'all' (meaning use the list of jobs for that job type)
+ - test_arg is a comma-separated string which needs to be parsed
+ '''
+
+ # Empty job list case...
+ if test_arg is None or test_arg == 'none':
+ return []
+
+ all_platforms = set(t.attributes['test_platform']
+ for t in full_task_graph.tasks.itervalues()
+ if 'test_platform' in t.attributes)
+
+ tests = self.parse_test_opts(test_arg, all_platforms)
+
+ if not tests:
+ return []
+
+ all_tests = set(t.attributes[attr_name]
+ for t in full_task_graph.tasks.itervalues()
+ if attr_name in t.attributes)
+
+ # Special case where tests is 'all' and must be expanded
+ if tests[0]['test'] == 'all':
+ results = []
+ all_entry = tests[0]
+ for test in all_tests:
+ entry = {'test': test}
+ # If there are platform restrictions copy them across the list.
+ if 'platforms' in all_entry:
+ entry['platforms'] = list(all_entry['platforms'])
+ results.append(entry)
+ return self.parse_test_chunks(all_tests, results)
+ else:
+ return self.parse_test_chunks(all_tests, tests)
+
+ def parse_test_opts(self, input_str, all_platforms):
+ '''
+ Parse `testspec,testspec,..`, where each testspec is a test name
+ optionally followed by a list of test platforms or negated platforms in
+ `[]`.
+
+ No brackets indicates that tests should run on all platforms for which
+ builds are available. If testspecs are provided, then each is treated,
+ from left to right, as an instruction to include or (if negated)
+ exclude a set of test platforms. A single spec may expand to multiple
+ test platforms via UNITTEST_PLATFORM_PRETTY_NAMES. If the first test
+ spec is negated, processing begins with the full set of available test
+ platforms; otherwise, processing begins with an empty set of test
+ platforms.
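+
+ For example (illustrative), "reftest[-Ubuntu]" starts from the full set of
+ available test platforms (the first spec is negated) and removes the
+ platforms that 'Ubuntu' expands to via UNITTEST_PLATFORM_PRETTY_NAMES.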
+ '''
+
+ # Final results which we will return.
+ tests = []
+
+ cur_test = {}
+ token = ''
+ in_platforms = False
+
+ def normalize_platforms():
+ if 'platforms' not in cur_test:
+ return
+ # if the first spec is a negation, start with all platforms
+ if cur_test['platforms'][0][0] == '-':
+ platforms = all_platforms.copy()
+ else:
+ platforms = []
+ for platform in cur_test['platforms']:
+ if platform[0] == '-':
+ platforms = [p for p in platforms if p != platform[1:]]
+ else:
+ platforms.append(platform)
+ cur_test['platforms'] = platforms
+
+ def add_test(value):
+ normalize_platforms()
+ cur_test['test'] = value.strip()
+ tests.insert(0, cur_test)
+
+ def add_platform(value):
+ platform = value.strip()
+ if platform[0] == '-':
+ negated = True
+ platform = platform[1:]
+ else:
+ negated = False
+ platforms = UNITTEST_PLATFORM_PRETTY_NAMES.get(platform, [platform])
+ if negated:
+ platforms = ["-" + p for p in platforms]
+ cur_test['platforms'] = platforms + cur_test.get('platforms', [])
+
+ # This might be somewhat confusing but we parse the string _backwards_ so
+ # there is no ambiguity over what state we are in.
+ for char in reversed(input_str):
+
+ # , indicates exiting a state
+ if char == ',':
+
+ # Exit a particular platform.
+ if in_platforms:
+ add_platform(token)
+
+ # Exit a particular test.
+ else:
+ add_test(token)
+ cur_test = {}
+
+ # Token must always be reset after we exit a state
+ token = ''
+ elif char == '[':
+ # Exiting platform state entering test state.
+ add_platform(token)
+ token = ''
+ in_platforms = False
+ elif char == ']':
+ # Entering platform state.
+ in_platforms = True
+ else:
+ # Accumulator.
+ token = char + token
+
+ # Handle any left over tokens.
+ if token:
+ add_test(token)
+
+ return tests
+
+ def handle_alias(self, test, all_tests):
+ '''
+ Expand a test if its name refers to an alias, returning a list of test
+ dictionaries cloned from the first (to maintain any metadata).
+ '''
+ if test['test'] not in UNITTEST_ALIASES:
+ return [test]
+
+ alias = UNITTEST_ALIASES[test['test']]
+
+ def mktest(name):
+ newtest = copy.deepcopy(test)
+ newtest['test'] = name
+ return newtest
+
+ def exprmatch(alias):
+ return [t for t in all_tests if alias(t)]
+
+ return [mktest(t) for t in exprmatch(alias)]
+
+ def parse_test_chunks(self, all_tests, tests):
+ '''
+ Test flags may include parameters to narrow down the number of chunks in a
+ given push. We don't model 1 chunk = 1 job in taskcluster so we must check
+ each test flag to see if it is actually specifying a chunk.
+ '''
+ results = []
+ seen_chunks = {}
+ for test in tests:
+ matches = TEST_CHUNK_SUFFIX.match(test['test'])
+ if matches:
+ name = matches.group(1)
+ chunk = matches.group(2)
+ if name in seen_chunks:
+ seen_chunks[name].add(chunk)
+ else:
+ seen_chunks[name] = {chunk}
+ test['test'] = name
+ test['only_chunks'] = seen_chunks[name]
+ results.append(test)
+ else:
+ results.extend(self.handle_alias(test, all_tests))
+
+ # uniquify the results over the test names
+ results = {test['test']: test for test in results}.values()
+ return results
+
+ def find_all_attribute_suffixes(self, graph, prefix):
+ rv = set()
+ for t in graph.tasks.itervalues():
+ for a in t.attributes:
+ if a.startswith(prefix):
+ rv.add(a[len(prefix):])
+ return sorted(rv)
+
+ def escape_whitespace_in_brackets(self, input_str):
+ '''
+ Tests may be restricted to platforms using brackets; whitespace may occur
+ inside the brackets, which is typically invalid shell syntax, so we escape
+ it with backslash sequences.
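+
+ For example (illustrative), "-u mochitest[Windows 7]" becomes
+ "-u mochitest[Windows\ 7]".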
+ '''
+ result = ""
+ in_brackets = False
+ for char in input_str:
+ if char == '[':
+ in_brackets = True
+ result += char
+ continue
+
+ if char == ']':
+ in_brackets = False
+ result += char
+ continue
+
+ if char == ' ' and in_brackets:
+ result += '\ '
+ continue
+
+ result += char
+
+ return result
+
+ def task_matches(self, attributes):
+ attr = attributes.get
+
+ def check_run_on_projects():
+ return set(['try', 'all']) & set(attr('run_on_projects', []))
+
+ def match_test(try_spec, attr_name):
+ if attr('build_type') not in self.build_types:
+ return False
+ if self.platforms is not None:
+ if attr('build_platform') not in self.platforms:
+ return False
+ else:
+ if not check_run_on_projects():
+ return False
+ if try_spec is None:
+ return True
+ # TODO: optimize this search a bit
+ for test in try_spec:
+ if attr(attr_name) == test['test']:
+ break
+ else:
+ return False
+ if 'platforms' in test and attr('test_platform') not in test['platforms']:
+ return False
+ if 'only_chunks' in test and attr('test_chunk') not in test['only_chunks']:
+ return False
+ return True
+
+ if attr('kind') in ('desktop-test', 'android-test'):
+ return match_test(self.unittests, 'unittest_try_name')
+ elif attr('kind') in JOB_KINDS:
+ if self.jobs is None:
+ return True
+ if attr('build_platform') in self.jobs:
+ return True
+ elif attr('kind') in BUILD_KINDS:
+ if attr('build_type') not in self.build_types:
+ return False
+ elif self.platforms is None:
+ # for "-p all", look for try in the 'run_on_projects' attribute
+ return check_run_on_projects()
+ else:
+ if attr('build_platform') not in self.platforms:
+ return False
+ return True
+ else:
+ return False
+
+ def __str__(self):
+ def none_for_all(lst):
+ if lst is None:
+ return '<all>'
+ return ', '.join(str(e) for e in lst)
+
+ return "\n".join([
+ "build_types: " + ", ".join(self.build_types),
+ "platforms: " + none_for_all(self.platforms),
+ "unittests: " + none_for_all(self.unittests),
+ "jobs: " + none_for_all(self.jobs),
+ "trigger_tests: " + str(self.trigger_tests),
+ "interactive: " + str(self.interactive),
+ "notifications: " + self.notifications,
+ ])
diff --git a/taskcluster/taskgraph/util/__init__.py b/taskcluster/taskgraph/util/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/taskcluster/taskgraph/util/__init__.py
diff --git a/taskcluster/taskgraph/util/attributes.py b/taskcluster/taskgraph/util/attributes.py
new file mode 100644
index 000000000..b44a3364f
--- /dev/null
+++ b/taskcluster/taskgraph/util/attributes.py
@@ -0,0 +1,26 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+
+def attrmatch(attributes, **kwargs):
+ """Determine whether the given set of task attributes matches. The
+ conditions are given as keyword arguments, where each keyword names an
+ attribute. The keyword value can be a literal, a set, or a callable. A
+ literal must match the attribute exactly. Given a set, the attribute value
+ must be in the set. A callable is called with the attribute value. If an
+ attribute is specified as a keyword argument but not present in the
+ attributes, the result is False."""
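+ # Illustrative examples of the rules above:
+ #   attrmatch({'kind': 'build'}, kind='build')            -> True (literal)
+ #   attrmatch({'kind': 'build'}, kind={'build', 'test'})  -> True (set)
+ #   attrmatch({'kind': 'build'}, kind=lambda v: 'b' in v) -> True (callable)
+ #   attrmatch({}, kind='build')                           -> False (missing)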
+ for kwkey, kwval in kwargs.iteritems():
+ if kwkey not in attributes:
+ return False
+ attval = attributes[kwkey]
+ if isinstance(kwval, set):
+ if attval not in kwval:
+ return False
+ elif callable(kwval):
+ if not kwval(attval):
+ return False
+ elif kwval != attributes[kwkey]:
+ return False
+ return True
diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py
new file mode 100644
index 000000000..df97e57bc
--- /dev/null
+++ b/taskcluster/taskgraph/util/docker.py
@@ -0,0 +1,160 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import hashlib
+import os
+import shutil
+import subprocess
+import tarfile
+import tempfile
+
+from mozpack.archive import (
+ create_tar_gz_from_files,
+)
+
+
+GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..'))
+DOCKER_ROOT = os.path.join(GECKO, 'testing', 'docker')
+INDEX_PREFIX = 'docker.images.v2'
+ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
+
+
+def docker_image(name, default_version=None):
+ '''Determine the docker image name, including repository and tag, from an
+ in-tree docker file.'''
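+ # For example (illustrative), with a REGISTRY file containing
+ # "quay.io/mozilla" and a VERSION file containing "1.0", this returns
+ # "quay.io/mozilla/desktop-test:1.0" for name='desktop-test'.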
+ try:
+ with open(os.path.join(DOCKER_ROOT, name, 'REGISTRY')) as f:
+ registry = f.read().strip()
+ except IOError:
+ with open(os.path.join(DOCKER_ROOT, 'REGISTRY')) as f:
+ registry = f.read().strip()
+
+ try:
+ with open(os.path.join(DOCKER_ROOT, name, 'VERSION')) as f:
+ version = f.read().strip()
+ except IOError:
+ if not default_version:
+ raise
+
+ version = default_version
+
+ return '{}/{}:{}'.format(registry, name, version)
+
+
+def generate_context_hash(topsrcdir, image_path, image_name):
+ """Generates a sha256 hash for context directory used to build an image."""
+
+ # It is a bit unfortunate we have to create a temp file here - it would
+ # be nicer to use an in-memory buffer.
+ fd, p = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ return create_context_tar(topsrcdir, image_path, p, image_name)
+ finally:
+ os.unlink(p)
+
+
+def create_context_tar(topsrcdir, context_dir, out_path, prefix):
+ """Create a context tarball.
+
+ A directory ``context_dir`` containing a Dockerfile will be assembled into
+ a gzipped tar file at ``out_path``. Files inside the archive will be
+ prefixed by directory ``prefix``.
+
+ We also scan the source Dockerfile for special syntax that influences
+ context generation.
+
+ If a line in the Dockerfile has the form ``# %include <path>``,
+ the relative path specified on that line will be matched against
+ files in the source repository and added to the context under the
+ path ``topsrcdir/``. If an entry is a directory, we add all files
+ under that directory.
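+
+ For example (illustrative), a Dockerfile line ``# %include testing/mozharness``
+ adds everything under ``testing/mozharness`` to the context at
+ ``<prefix>/topsrcdir/testing/mozharness``.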
+
+ Returns the SHA-256 hex digest of the created archive.
+ """
+ archive_files = {}
+
+ for root, dirs, files in os.walk(context_dir):
+ for f in files:
+ source_path = os.path.join(root, f)
+ rel = source_path[len(context_dir) + 1:]
+ archive_path = os.path.join(prefix, rel)
+ archive_files[archive_path] = source_path
+
+ # Parse Dockerfile for special syntax of extra files to include.
+ with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
+ for line in fh:
+ line = line.rstrip()
+ if not line.startswith('# %include'):
+ continue
+
+ p = line[len('# %include '):].strip()
+ if os.path.isabs(p):
+ raise Exception('extra include path cannot be absolute: %s' % p)
+
+ fs_path = os.path.normpath(os.path.join(topsrcdir, p))
+ # Check for filesystem traversal exploits.
+ if not fs_path.startswith(topsrcdir):
+ raise Exception('extra include path outside topsrcdir: %s' % p)
+
+ if not os.path.exists(fs_path):
+ raise Exception('extra include path does not exist: %s' % p)
+
+ if os.path.isdir(fs_path):
+ for root, dirs, files in os.walk(fs_path):
+ for f in files:
+ source_path = os.path.join(root, f)
+ archive_path = os.path.join(prefix, 'topsrcdir', p, f)
+ archive_files[archive_path] = source_path
+ else:
+ archive_path = os.path.join(prefix, 'topsrcdir', p)
+ archive_files[archive_path] = fs_path
+
+ with open(out_path, 'wb') as fh:
+ create_tar_gz_from_files(fh, archive_files, '%s.tar.gz' % prefix)
+
+ h = hashlib.sha256()
+ with open(out_path, 'rb') as fh:
+ while True:
+ data = fh.read(32768)
+ if not data:
+ break
+ h.update(data)
+ return h.hexdigest()
+
+
+def build_from_context(docker_bin, context_path, prefix, tag=None):
+ """Build a Docker image from a context archive.
+
+ Given the path to a `docker` binary, an image build tar.gz (produced with
+ ``create_context_tar()``), a prefix in that context containing files, and
+ an optional ``tag`` for the produced image, build that Docker image.
+ """
+ d = tempfile.mkdtemp()
+ try:
+ with tarfile.open(context_path, 'r:gz') as tf:
+ tf.extractall(d)
+
+ # If we wanted to do post-processing of the Dockerfile, this is
+ # where we'd do it.
+
+ args = [
+ docker_bin,
+ 'build',
+ # Use --no-cache so we always get the latest package updates.
+ '--no-cache',
+ ]
+
+ if tag:
+ args.extend(['-t', tag])
+
+ args.append('.')
+
+ res = subprocess.call(args, cwd=os.path.join(d, prefix))
+ if res:
+ raise Exception('error building image')
+ finally:
+ shutil.rmtree(d)
diff --git a/taskcluster/taskgraph/util/python_path.py b/taskcluster/taskgraph/util/python_path.py
new file mode 100644
index 000000000..b14223ca6
--- /dev/null
+++ b/taskcluster/taskgraph/util/python_path.py
@@ -0,0 +1,27 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+
+def find_object(path):
+ """
+ Find a Python object given a path of the form <modulepath>:<objectpath>.
+ Conceptually equivalent to
+
+ def find_object(modulepath, objectpath):
+ import <modulepath> as mod
+ return mod.<objectpath>
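+
+ For example (illustrative), find_object('os.path:join') imports os.path
+ and returns its join function.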
+ """
+ if path.count(':') != 1:
+ raise ValueError(
+ 'python path {!r} does not have the form "module:object"'.format(path))
+
+ modulepath, objectpath = path.split(':')
+ obj = __import__(modulepath)
+ for a in modulepath.split('.')[1:]:
+ obj = getattr(obj, a)
+ for a in objectpath.split('.'):
+ obj = getattr(obj, a)
+ return obj
diff --git a/taskcluster/taskgraph/util/seta.py b/taskcluster/taskgraph/util/seta.py
new file mode 100644
index 000000000..a0cd30675
--- /dev/null
+++ b/taskcluster/taskgraph/util/seta.py
@@ -0,0 +1,85 @@
+import json
+import logging
+import requests
+from redo import retry
+from requests import exceptions
+
+logger = logging.getLogger(__name__)
+headers = {
+ 'User-Agent': 'TaskCluster'
+}
+
+# The list of projects (branches) for which SETA is useful
+SETA_PROJECTS = ['mozilla-inbound', 'autoland']
+SETA_ENDPOINT = "https://seta.herokuapp.com/data/setadetails/?branch=%s"
+
+
+class SETA(object):
+ """
+ Interface to the SETA service, which defines low-value tasks that can be optimized out
+ of the taskgraph.
+ """
+ def __init__(self):
+ # cached low value tasks, by project
+ self.low_value_tasks = {}
+
+ def query_low_value_tasks(self, project):
+ # Request the set of low value tasks from the SETA service. Low value tasks will be
+ # optimized out of the task graph.
+ if project not in SETA_PROJECTS:
+ logger.debug("SETA is not enabled for project `{}`".format(project))
+ return []
+
+ logger.debug("Querying SETA service for low-value tasks on {}".format(project))
+ low_value_tasks = []
+
+ url = SETA_ENDPOINT % project
+        # Try to fetch the SETA data twice, falling back to an empty list of
+        # low value tasks. Wait 10 seconds between attempts.
+ try:
+ logger.debug("Retrieving low-value jobs list from SETA")
+ response = retry(requests.get, attempts=2, sleeptime=10,
+ args=(url, ),
+ kwargs={'timeout': 5, 'headers': headers})
+ task_list = json.loads(response.content).get('jobtypes', '')
+ if len(task_list) > 0:
+ low_value_tasks = task_list.values()[0]
+
+ # Bug 1315145, disable SETA for tier-1 platforms until backfill is implemented.
+ low_value_tasks = [x for x in low_value_tasks if x.find('debug') == -1]
+ low_value_tasks = [x for x in low_value_tasks if x.find('asan') == -1]
+
+        # In the event that the request times out, requests raises a Timeout
+        # exception.
+        except exceptions.Timeout:
+            logger.warning("SETA request timed out; we will treat all test tasks"
+                           " as high value.")
+
+        # In the event of a network problem (e.g. DNS failure, refused connection, etc.),
+        # requests raises a ConnectionError.
+        except exceptions.ConnectionError:
+            logger.warning("Failed to connect to the SETA server; we will treat all"
+                           " test tasks as high value.")
+
+        # In the event of a rare invalid HTTP response (e.g. 404, 401),
+        # requests raises an HTTPError exception.
+        except exceptions.HTTPError:
+            logger.warning("Got a bad HTTP response from ouija;"
+                           " we will treat all test tasks as high value.")
+
+        # Log any other request failure that wasn't caught by the handlers above.
+ except exceptions.RequestException as error:
+ logger.warning(error)
+
+        # When we get invalid JSON (e.g. from a 500 error page), it results in a
+        # ValueError (bug 1313426)
+ except ValueError as error:
+ logger.warning("Invalid JSON, possible server error: {}".format(error))
+
+ return low_value_tasks
+
+ def is_low_value_task(self, label, project):
+ # cache the low value tasks per project to avoid repeated SETA server queries
+ if project not in self.low_value_tasks:
+ self.low_value_tasks[project] = self.query_low_value_tasks(project)
+ return label in self.low_value_tasks[project]
+
+# create a single instance of this class, and expose its `is_low_value_task`
+# bound method as a module-level function
+is_low_value_task = SETA().is_low_value_task
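+
+# A usage sketch (the task label below is hypothetical; is_low_value_task is
+# what the optimizer consults):
+#
+#     if is_low_value_task('desktop-test-linux64/opt-reftest-1', 'autoland'):
+#         pass  # the optimizer may drop this task from the graph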
diff --git a/taskcluster/taskgraph/util/templates.py b/taskcluster/taskgraph/util/templates.py
new file mode 100644
index 000000000..97620fa75
--- /dev/null
+++ b/taskcluster/taskgraph/util/templates.py
@@ -0,0 +1,155 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+
+import pystache
+import yaml
+import copy
+
+# Key used to mark template inheritance
+INHERITS_KEY = '$inherits'
+
+
+def merge_to(source, dest):
+ '''
+    Merge dicts and arrays (overriding scalar values).
+
+ Keys from source override keys from dest, and elements from lists in source
+ are appended to lists in dest.
+
+ :param dict source: to copy from
+ :param dict dest: to copy to (modified in place)
+ '''
+
+ for key, value in source.items():
+ # Override mismatching or empty types
+ if type(value) != type(dest.get(key)): # noqa
+ dest[key] = source[key]
+ continue
+
+ # Merge dict
+ if isinstance(value, dict):
+ merge_to(value, dest[key])
+ continue
+
+ if isinstance(value, list):
+ dest[key] = dest[key] + source[key]
+ continue
+
+ dest[key] = source[key]
+
+ return dest
+
+
+def merge(*objects):
+ '''
+ Merge the given objects, using the semantics described for merge_to, with
+ objects later in the list taking precedence. From an inheritance
+ perspective, "parents" should be listed before "children".
+
+ Returns the result without modifying any arguments.
+ '''
+ if len(objects) == 1:
+ return copy.deepcopy(objects[0])
+ return merge_to(objects[-1], merge(*objects[:-1]))
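+
+# For example (illustrative): scalars from later objects win, and lists are
+# concatenated:
+#
+#     merge({'a': 1, 'l': [1]}, {'a': 2, 'l': [2]})
+#     # => {'a': 2, 'l': [1, 2]}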
+
+
+class TemplatesException(Exception):
+ pass
+
+
+class Templates(object):
+    '''
+    The taskcluster integration makes heavy use of YAML to describe tasks;
+    this class handles the loading and rendering.
+    '''
+
+ def __init__(self, root):
+        '''
+        Initialize the template renderer.
+
+        :param str root: Root path from which to load YAML files.
+        '''
+ if not root:
+ raise TemplatesException('Root is required')
+
+ if not os.path.isdir(root):
+ raise TemplatesException('Root must be a directory')
+
+ self.root = root
+
+    def _inherits(self, path, obj, properties, seen):
+        blueprint = obj.pop(INHERITS_KEY)
+        seen.add(path)
+
+        # Check for a missing template before resolving the path, since
+        # resolve_path would otherwise fail on a missing 'from' key.
+        from_path = blueprint.get('from')
+        if not from_path:
+            msg = '"{}" inheritance template missing'.format(path)
+            raise TemplatesException(msg)
+
+        # Resolve the path here so we can detect circular references.
+        template = self.resolve_path(from_path)
+        variables = blueprint.get('variables', {})
+
+        # Passed parameters override anything in the task itself.
+        for key in properties:
+            variables[key] = properties[key]
+
+        if template in seen:
+            msg = ('circular template inheritance detected while handling '
+                   '"{}" (from template "{}"); seen: {}')
+            raise TemplatesException(msg.format(path, template, seen))
+
+ try:
+ out = self.load(template, variables, seen)
+ except TemplatesException as e:
+ msg = 'Error expanding parent ("{}") of "{}" original error {}'
+ raise TemplatesException(msg.format(template, path, str(e)))
+
+ # Anything left in obj is merged into final results (and overrides)
+ return merge_to(obj, out)
+
+ def render(self, path, content, parameters, seen):
+        '''
+        Render a given YAML string.
+
+        :param str path: Used to prevent infinite recursion in inheritance.
+        :param str content: Content of the YAML file.
+        :param dict parameters: Parameters for mustache templates.
+        :param set seen: Files already seen (used for inheritance).
+        '''
+ content = pystache.render(content, parameters)
+ result = yaml.load(content)
+
+        # In addition to the usual template logic done by mustache, we also
+        # handle the special '$inherits' dict key.
+ if isinstance(result, dict) and INHERITS_KEY in result:
+ return self._inherits(path, result, parameters, seen)
+
+ return result
+
+ def resolve_path(self, path):
+ return os.path.join(self.root, path)
+
+ def load(self, path, parameters=None, seen=None):
+        '''
+        Load and render the given YAML path.
+
+        :param str path: Location of the YAML file to load (relative to root).
+        :param dict parameters: Parameters with which to template the YAML file.
+        '''
+ seen = seen or set()
+
+ if not path:
+ raise TemplatesException('path is required')
+
+ path = self.resolve_path(path)
+
+ if not os.path.isfile(path):
+ raise TemplatesException('"{}" is not a file'.format(path))
+
+        with open(path) as fh:
+            content = fh.read()
+        return self.render(path, content, parameters, seen)
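+
+
+# A usage sketch (the file names are hypothetical):
+#
+#     templates = Templates('/path/to/templates')
+#     task = templates.load('child.yml', {'project': 'autoland'})
+#
+# where child.yml can reference mustache variables such as {{project}} and
+# may carry an '$inherits' key naming a parent template and its variables.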
diff --git a/taskcluster/taskgraph/util/time.py b/taskcluster/taskgraph/util/time.py
new file mode 100644
index 000000000..160aaa70c
--- /dev/null
+++ b/taskcluster/taskgraph/util/time.py
@@ -0,0 +1,114 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Python port of the ms.js node module. This is not a direct port; some things
+# are more complicated or less precise, and we lean on timedelta here.
+
+import re
+import datetime
+
+PATTERN = re.compile(
+    r'((?:\d+)?\.?\d+) *([a-z]+)'
+)
+
+
+def seconds(value):
+ return datetime.timedelta(seconds=int(value))
+
+
+def minutes(value):
+ return datetime.timedelta(minutes=int(value))
+
+
+def hours(value):
+ return datetime.timedelta(hours=int(value))
+
+
+def days(value):
+ return datetime.timedelta(days=int(value))
+
+
+def months(value):
+ # See warning in years(), below
+ return datetime.timedelta(days=int(value) * 30)
+
+
+def years(value):
+    # Warning: "years" here are vague. Don't use this for date-sensitive
+    # computation; the idea is to give an absolute amount of time in the
+    # future, which is not the same thing as "precisely on this date next year".
+    return datetime.timedelta(days=int(value) * 365)
+
+ALIASES = {}
+ALIASES['seconds'] = ALIASES['second'] = ALIASES['s'] = seconds
+ALIASES['minutes'] = ALIASES['minute'] = ALIASES['min'] = minutes
+ALIASES['hours'] = ALIASES['hour'] = ALIASES['h'] = hours
+ALIASES['days'] = ALIASES['day'] = ALIASES['d'] = days
+ALIASES['months'] = ALIASES['month'] = ALIASES['mo'] = months
+ALIASES['years'] = ALIASES['year'] = ALIASES['y'] = years
+
+
+class InvalidString(Exception):
+ pass
+
+
+class UnknownTimeMeasurement(Exception):
+ pass
+
+
+def value_of(input_str):
+    '''
+    Convert a time string into a datetime.timedelta.
+
+    :param str input_str: Time string (e.g. 1d, 2d, 6years, 2 seconds).
+    :returns: datetime.timedelta corresponding to the input.
+    '''
+
+ matches = PATTERN.search(input_str)
+
+ if matches is None or len(matches.groups()) < 2:
+        raise InvalidString("'{}' is not a valid time string".format(input_str))
+
+ value, unit = matches.groups()
+
+ if unit not in ALIASES:
+ raise UnknownTimeMeasurement(
+            '{} is not a valid time measure; use one of {}'.format(
+ unit,
+ sorted(ALIASES.keys())
+ )
+ )
+
+ return ALIASES[unit](value)
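+
+# For example (illustrative):
+#
+#     value_of('2d')      # => datetime.timedelta(days=2)
+#     value_of('1 hour')  # => datetime.timedelta(hours=1)
+#
+# Note that although PATTERN accepts a decimal value, the converters above
+# call int(), so a fractional string like '1.5h' raises ValueError.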
+
+
+def json_time_from_now(input_str, now=None, datetime_format=False):
+ '''
+    :param str input_str: Input string (see ``value_of``)
+ :param datetime now: Optionally set the definition of `now`
+ :param boolean datetime_format: Set `True` to get a `datetime` output
+ :returns: JSON string representation of time in future.
+ '''
+
+ if now is None:
+ now = datetime.datetime.utcnow()
+
+ time = now + value_of(input_str)
+
+ if datetime_format is True:
+ return time
+ else:
+        # Somewhat of a hack, but the JSON schema validator for dates does not
+        # accept ISO dates until 'Z' (for the UTC timezone) is appended.
+ return time.isoformat() + 'Z'
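+
+# For example (illustrative):
+#
+#     json_time_from_now('1d', now=datetime.datetime(2017, 1, 1))
+#     # => '2017-01-02T00:00:00Z'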
+
+
+def current_json_time(datetime_format=False):
+ '''
+ :param boolean datetime_format: Set `True` to get a `datetime` output
+ :returns: JSON string representation of the current time.
+ '''
+ if datetime_format is True:
+ return datetime.datetime.utcnow()
+ else:
+ return datetime.datetime.utcnow().isoformat() + 'Z'
diff --git a/taskcluster/taskgraph/util/treeherder.py b/taskcluster/taskgraph/util/treeherder.py
new file mode 100644
index 000000000..e66db582f
--- /dev/null
+++ b/taskcluster/taskgraph/util/treeherder.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+import re
+
+
+def split_symbol(treeherder_symbol):
+ """Split a symbol expressed as grp(sym) into its two parts. If no group is
+ given, the returned group is '?'"""
+ groupSymbol = '?'
+ symbol = treeherder_symbol
+ if '(' in symbol:
+ groupSymbol, symbol = re.match(r'([^(]*)\(([^)]*)\)', symbol).groups()
+ return groupSymbol, symbol
+
+
+def join_symbol(group, symbol):
+ """Perform the reverse of split_symbol, combining the given group and
+ symbol. If the group is '?', then it is omitted."""
+ if group == '?':
+ return symbol
+ return '{}({})'.format(group, symbol)
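+
+# For example (illustrative):
+#
+#     split_symbol('M(1)')   # => ('M', '1')
+#     split_symbol('B')      # => ('?', 'B')
+#     join_symbol('M', '1')  # => 'M(1)'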
diff --git a/taskcluster/taskgraph/util/yaml.py b/taskcluster/taskgraph/util/yaml.py
new file mode 100644
index 000000000..4e541b775
--- /dev/null
+++ b/taskcluster/taskgraph/util/yaml.py
@@ -0,0 +1,16 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import os
+import yaml
+
+
+def load_yaml(path, name):
+ """Convenience function to load a YAML file in the given path. This is
+ useful for loading kind configuration files from the kind path."""
+ filename = os.path.join(path, name)
+ with open(filename, "rb") as f:
+ return yaml.load(f)
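+
+# A usage sketch (the kind path below is hypothetical):
+#
+#     config = load_yaml('taskcluster/ci/build', 'kind.yml')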