author     Matt A. Tobin <mattatobin@localhost.localdomain>   2018-02-02 04:16:08 -0500
committer  Matt A. Tobin <mattatobin@localhost.localdomain>   2018-02-02 04:16:08 -0500
commit     5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree       10027f336435511475e392454359edea8e25895d /taskcluster/taskgraph/transforms/job
parent     49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'taskcluster/taskgraph/transforms/job')
-rw-r--r--  taskcluster/taskgraph/transforms/job/__init__.py      164
-rw-r--r--  taskcluster/taskgraph/transforms/job/common.py        108
-rw-r--r--  taskcluster/taskgraph/transforms/job/hazard.py         91
-rw-r--r--  taskcluster/taskgraph/transforms/job/mach.py           30
-rw-r--r--  taskcluster/taskgraph/transforms/job/mozharness.py    226
-rw-r--r--  taskcluster/taskgraph/transforms/job/run_task.py       59
-rw-r--r--  taskcluster/taskgraph/transforms/job/spidermonkey.py   86
-rw-r--r--  taskcluster/taskgraph/transforms/job/toolchain.py     115
8 files changed, 879 insertions, 0 deletions
diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py
new file mode 100644
index 000000000..a0860c032
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/__init__.py
@@ -0,0 +1,164 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Convert a job description into a task description.
+
+Job descriptions are similar to task descriptions, but they specify how to run
+the job at a higher level, using a "run" field that can be interpreted by
+run-using handlers in `taskcluster/taskgraph/transforms/job`.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+import logging
+import os
+
+from taskgraph.transforms.base import validate_schema, TransformSequence
+from taskgraph.transforms.task import task_description_schema
+from voluptuous import (
+    Optional,
+    Required,
+    Schema,
+    Extra,
+)
+
+logger = logging.getLogger(__name__)
+
+# Voluptuous uses marker objects as dictionary *keys*, but they are not
+# comparable, so we cast all of the keys back to regular strings
+task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
+
+# Schema for a build description
+job_description_schema = Schema({
+    # The name of the job and the job's label.  At least one must be specified,
+    # and the label will be generated from the name if necessary, by prepending
+    # the kind.
+    Optional('name'): basestring,
+    Optional('label'): basestring,
+
+    # the following fields are passed directly through to the task description,
+    # possibly modified by the run implementation.  See
+    # taskcluster/taskgraph/transforms/task.py for the schema details.
+    Required('description'): task_description_schema['description'],
+    Optional('attributes'): task_description_schema['attributes'],
+    Optional('dependencies'): task_description_schema['dependencies'],
+    Optional('expires-after'): task_description_schema['expires-after'],
+    Optional('routes'): task_description_schema['routes'],
+    Optional('scopes'): task_description_schema['scopes'],
+    Optional('extra'): task_description_schema['extra'],
+    Optional('treeherder'): task_description_schema['treeherder'],
+    Optional('index'): task_description_schema['index'],
+    Optional('run-on-projects'): task_description_schema['run-on-projects'],
+    Optional('coalesce-name'): task_description_schema['coalesce-name'],
+    Optional('worker-type'): task_description_schema['worker-type'],
+    Required('worker'): task_description_schema['worker'],
+    Optional('when'): task_description_schema['when'],
+
+    # A description of how to run this job.
+    'run': {
+        # The key to a job implementation in a peer module to this one
+        'using': basestring,
+
+        # Any remaining content is verified against that job implementation's
+        # own schema.
+        Extra: object,
+    },
+})
+
+transforms = TransformSequence()
+
+
+@transforms.add
+def validate(config, jobs):
+    for job in jobs:
+        yield validate_schema(job_description_schema, job,
+                              "In job {!r}:".format(job['name']))
+
+
+@transforms.add
+def make_task_description(config, jobs):
+    """Given a build description, create a task description"""
+    # import plugin modules first, before iterating over jobs
+    import_all()
+    for job in jobs:
+        if 'label' not in job:
+            if 'name' not in job:
+                raise Exception("job has neither a name nor a label")
+            job['label'] = '{}-{}'.format(config.kind, job['name'])
+        if job['name']:
+            del job['name']
+
+        taskdesc = copy.deepcopy(job)
+
+        # fill in some empty defaults to make run implementations easier
+        taskdesc.setdefault('attributes', {})
+        taskdesc.setdefault('dependencies', {})
+        taskdesc.setdefault('routes', [])
+        taskdesc.setdefault('scopes', [])
+        taskdesc.setdefault('extra', {})
+
+        # give the function for job.run.using on this worker implementation a
+        # chance to set up the task description.
+        configure_taskdesc_for_run(config, job, taskdesc)
+        del taskdesc['run']
+
+        # yield only the task description, discarding the job description
+        yield taskdesc
+
+
+# A registry of all functions decorated with run_job_using
+registry = {}
+
+
+def run_job_using(worker_implementation, run_using, schema=None):
+    """Register the decorated function as able to set up a task description for
+    jobs with the given worker implementation and `run.using` property.  If
+    `schema` is given, the job's run field will be verified to match it.
+
+    The decorated function should have the signature `using_foo(config, job,
+    taskdesc)` and should modify the task description in-place.  The skeleton of
+    the task description is already set up, but without a payload."""
+    def wrap(func):
+        for_run_using = registry.setdefault(run_using, {})
+        if worker_implementation in for_run_using:
+            raise Exception("run_job_using({!r}, {!r}) already exists: {!r}".format(
+                run_using, worker_implementation, for_run_using[run_using]))
+        for_run_using[worker_implementation] = (func, schema)
+        return func
+    return wrap
+
+
+def configure_taskdesc_for_run(config, job, taskdesc):
+    """
+    Run the appropriate function for this job against the given task
+    description.
+
+    This will raise an appropriate error if no function exists, or if the job's
+    run is not valid according to the schema.
+    """
+    run_using = job['run']['using']
+    if run_using not in registry:
+        raise Exception("no functions for run.using {!r}".format(run_using))
+
+    worker_implementation = job['worker']['implementation']
+    if worker_implementation not in registry[run_using]:
+        raise Exception("no functions for run.using {!r} on {!r}".format(
+            run_using, worker_implementation))
+
+    func, schema = registry[run_using][worker_implementation]
+    if schema:
+        job['run'] = validate_schema(
+            schema, job['run'],
+            "In job.run using {!r} for job {!r}:".format(
+                job['run']['using'], job['label']))
+
+    func(config, job, taskdesc)
+
+
+def import_all():
+    """Import all modules that are siblings of this one, triggering the decorator
+    above in the process."""
+    for f in os.listdir(os.path.dirname(__file__)):
+        if f.endswith('.py') and f not in ('common.py', '__init__.py'):
+            __import__('taskgraph.transforms.job.' + f[:-3])
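For illustration only (not part of this commit): a stripped-down, self-contained sketch of the registry pattern that run_job_using() and configure_taskdesc_for_run() implement above. All names and values below are invented.

# Illustrative sketch of the run-using registry/dispatch pattern.
# Names and values here are invented; this is not the module above.

registry = {}

def run_job_using(worker_implementation, run_using):
    def wrap(func):
        registry.setdefault(run_using, {})[worker_implementation] = func
        return func
    return wrap

@run_job_using("docker-worker", "echo")
def docker_worker_echo(config, job, taskdesc):
    # handlers mutate the task description in place
    taskdesc['worker']['command'] = ['echo', job['run']['message']]

def configure(config, job, taskdesc):
    func = registry[job['run']['using']][job['worker']['implementation']]
    func(config, job, taskdesc)

job = {'worker': {'implementation': 'docker-worker'},
       'run': {'using': 'echo', 'message': 'hello'}}
taskdesc = {'worker': {}}
configure(None, job, taskdesc)
print(taskdesc['worker']['command'])   # ['echo', 'hello']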
diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
new file mode 100644
index 000000000..59a51d75a
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/common.py
@@ -0,0 +1,108 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Common support for various job types.  These functions are all named after the
+worker implementation they operate on, and take the same three parameters, for
+consistency.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'
+
+
+def docker_worker_add_workspace_cache(config, job, taskdesc):
+    """Add the workspace cache based on the build platform/type and level,
+    except on try where workspace caches are not used."""
+    if config.params['project'] == 'try':
+        return
+
+    taskdesc['worker'].setdefault('caches', []).append({
+        'type': 'persistent',
+        'name': 'level-{}-{}-build-{}-{}-workspace'.format(
+            config.params['level'], config.params['project'],
+            taskdesc['attributes']['build_platform'],
+            taskdesc['attributes']['build_type'],
+        ),
+        'mount-point': "/home/worker/workspace",
+    })
+
+
+def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
+    taskdesc['worker'].setdefault('caches', []).append({
+        'type': 'persistent',
+        'name': 'level-{}-{}-tc-vcs'.format(
+            config.params['level'], config.params['project']),
+        'mount-point': "/home/worker/.tc-vcs",
+    })
+
+
+def docker_worker_add_public_artifacts(config, job, taskdesc):
+    taskdesc['worker'].setdefault('artifacts', []).append({
+        'name': 'public/build',
+        'path': '/home/worker/artifacts/',
+        'type': 'directory',
+    })
+
+
+def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
+    """Add the GECKO_BASE_* and GECKO_HEAD_* env vars to the worker."""
+    env = taskdesc['worker'].setdefault('env', {})
+    env.update({
+        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+        'GECKO_HEAD_REF': config.params['head_rev'],
+        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+        'GECKO_HEAD_REV': config.params['head_rev'],
+    })
+
+
+def docker_worker_support_vcs_checkout(config, job, taskdesc):
+    """Update a job/task with parameters to enable a VCS checkout.
+
+    The configuration is intended for tasks using "run-task" and its
+    VCS checkout behavior.
+    """
+    level = config.params['level']
+
+    taskdesc['worker'].setdefault('caches', []).append({
+        'type': 'persistent',
+        # History of versions:
+        #
+        # ``level-%s-checkouts`` was initially used and contained a number
+        # of backwards incompatible changes, such as moving HG_STORE_PATH
+        # from a separate cache to this cache.
+        #
+        # ``v1`` was introduced to provide a clean break from the unversioned
+        # cache.
+        'name': 'level-%s-checkouts-v1' % level,
+        'mount-point': '/home/worker/checkouts',
+    })
+
+    taskdesc['worker'].setdefault('env', {}).update({
+        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
+        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
+        'GECKO_HEAD_REV': config.params['head_rev'],
+        'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
+    })
+
+    # Give task access to hgfingerprint secret so it can pin the certificate
+    # for hg.mozilla.org.
+    taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
+
+    taskdesc['worker']['taskcluster-proxy'] = True
+
+
+def docker_worker_setup_secrets(config, job, taskdesc):
+    """Set up access to secrets via taskcluster-proxy.  The value of
+    run['secrets'] should be a boolean or a list of secret names that
+    can be accessed."""
+    if not job['run'].get('secrets'):
+        return
+
+    taskdesc['worker']['taskcluster-proxy'] = True
+    secrets = job['run']['secrets']
+    if secrets is True:
+        secrets = ['*']
+    for sec in secrets:
+        taskdesc['scopes'].append(SECRET_SCOPE.format(
+            job['treeherder']['kind'], config.params['level'], sec))
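For illustration only (not part of this commit): the helpers in common.py mutate the task description in place. This standalone snippet mirrors the setdefault/append pattern used by docker_worker_add_public_artifacts(), applied to an invented taskdesc.

# Standalone sketch of the in-place mutation these helpers perform;
# the taskdesc dict here is invented for illustration.

taskdesc = {'worker': {}}

taskdesc['worker'].setdefault('artifacts', []).append({
    'name': 'public/build',
    'path': '/home/worker/artifacts/',
    'type': 'directory',
})

print(taskdesc['worker']['artifacts'])
# [{'name': 'public/build', 'path': '/home/worker/artifacts/', 'type': 'directory'}]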
diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py
new file mode 100644
index 000000000..c5b500843
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/hazard.py
@@ -0,0 +1,91 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running hazard jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_add_workspace_cache,
+    docker_worker_setup_secrets,
+    docker_worker_add_public_artifacts,
+    docker_worker_support_vcs_checkout,
+)
+
+haz_run_schema = Schema({
+    Required('using'): 'hazard',
+
+    # The command to run within the task image (passed through to the worker)
+    Required('command'): basestring,
+
+    # The tooltool manifest to use; default in the script is used if omitted
+    Optional('tooltool-manifest'): basestring,
+
+    # The mozconfig to use; default in the script is used if omitted
+    Optional('mozconfig'): basestring,
+
+    # The set of secret names to which the task has access; these are prefixed
+    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
+    # this will enable any worker features required and set the task's scopes
+    # appropriately.  `true` here means ['*'], all secrets.  Not supported on
+    # Windows
+    Required('secrets', default=False): Any(bool, [basestring]),
+})
+
+
+@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
+def docker_worker_hazard(config, job, taskdesc):
+    run = job['run']
+
+    worker = taskdesc['worker']
+    worker['artifacts'] = []
+    worker['caches'] = []
+
+    docker_worker_add_public_artifacts(config, job, taskdesc)
+    docker_worker_add_workspace_cache(config, job, taskdesc)
+    docker_worker_setup_secrets(config, job, taskdesc)
+    docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+    env = worker['env']
+    env.update({
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+    })
+
+    # script parameters
+    if run.get('tooltool-manifest'):
+        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
+    if run.get('mozconfig'):
+        env['MOZCONFIG'] = run['mozconfig']
+
+    # tooltool downloads
+    worker['caches'].append({
+        'type': 'persistent',
+        'name': 'tooltool-cache',
+        'mount-point': '/home/worker/tooltool-cache',
+    })
+    worker['relengapi-proxy'] = True
+    taskdesc['scopes'].extend([
+        'docker-worker:relengapi-proxy:tooltool.download.public',
+    ])
+    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+    env['TOOLTOOL_REV'] = 'master'
+
+    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
+    # the workspace.
+    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
+
+    worker['command'] = [
+        '/home/worker/bin/run-task',
+        '--chown-recursive', '/home/worker/tooltool-cache',
+        '--chown-recursive', '/home/worker/workspace',
+        '--vcs-checkout', '/home/worker/checkouts/gecko',
+        '--',
+        '/bin/bash', '-c', run['command']
+    ]
diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py
new file mode 100644
index 000000000..8df202dbc
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mach.py
@@ -0,0 +1,30 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running mach tasks (via run-task)
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.run_task import docker_worker_run_task
+from voluptuous import Schema, Required
+
+mach_schema = Schema({
+    Required('using'): 'mach',
+
+    # The mach command (omitting `./mach`) to run
+    Required('mach'): basestring,
+})
+
+
+@run_job_using("docker-worker", "mach", schema=mach_schema)
+def docker_worker_mach(config, job, taskdesc):
+    run = job['run']
+
+    # defer to the run_task implementation
+    run['command'] = 'cd /home/worker/checkouts/gecko && ./mach ' + run['mach']
+    run['checkout'] = True
+    del run['mach']
+    docker_worker_run_task(config, job, taskdesc)
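For illustration only (not part of this commit): a hypothetical 'run' section that the mach transform above would accept, shown as the Python dict the transform receives (in-tree these definitions live in YAML kind files). The mach command is invented.

# Hypothetical 'run' section for a mach job; the command is an assumption.
job_run = {
    'using': 'mach',
    'mach': 'python-tests',   # invented example; any mach subcommand fits the schema
}

# docker_worker_mach() rewrites this into a run-task command roughly like:
#   cd /home/worker/checkouts/gecko && ./mach python-tests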
diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py
new file mode 100644
index 000000000..fb3cd00dd
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/mozharness.py
@@ -0,0 +1,226 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+
+Support for running jobs via mozharness.  Ideally, most stuff gets run this
+way, and certainly anything using mozharness should use this approach.
+
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_add_workspace_cache,
+    docker_worker_add_gecko_vcs_env_vars,
+    docker_worker_setup_secrets,
+    docker_worker_add_public_artifacts,
+    docker_worker_support_vcs_checkout,
+)
+
+COALESCE_KEY = 'builds.{project}.{name}'
+
+mozharness_run_schema = Schema({
+    Required('using'): 'mozharness',
+
+    # the mozharness script used to run this task, relative to the testing/
+    # directory and using forward slashes even on Windows
+    Required('script'): basestring,
+
+    # the config files required for the task, relative to
+    # testing/mozharness/configs and using forward slashes even on Windows
+    Required('config'): [basestring],
+
+    # any additional actions to pass to the mozharness command; not supported
+    # on Windows
+    Optional('actions'): [basestring],
+
+    # any additional options (without leading --) to be passed to mozharness;
+    # not supported on Windows
+    Optional('options'): [basestring],
+
+    # --custom-build-variant-cfg value (not supported on Windows)
+    Optional('custom-build-variant-cfg'): basestring,
+
+    # If not false, tooltool downloads will be enabled via relengAPIProxy
+    # for either just public files, or all files.  Not supported on Windows
+    Required('tooltool-downloads', default=False): Any(
+        False,
+        'public',
+        'internal',
+    ),
+
+    # The set of secret names to which the task has access; these are prefixed
+    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
+    # this will enable any worker features required and set the task's scopes
+    # appropriately.  `true` here means ['*'], all secrets.  Not supported on
+    # Windows
+    Required('secrets', default=False): Any(bool, [basestring]),
+
+    # If true, taskcluster proxy will be enabled; note that it may also be enabled
+    # automatically e.g., for secrets support.  Not supported on Windows.
+    Required('taskcluster-proxy', default=False): bool,
+
+    # If true, the build scripts will start Xvfb.  Not supported on Windows.
+    Required('need-xvfb', default=False): bool,
+
+    # If false, indicate that builds should skip producing artifacts.  Not
+    # supported on Windows.
+    Required('keep-artifacts', default=True): bool,
+
+    # If specified, use the in-tree job script specified.
+    Optional('job-script'): basestring,
+})
+
+
+@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_docker_worker_setup(config, job, taskdesc):
+    run = job['run']
+
+    worker = taskdesc['worker']
+    worker['implementation'] = job['worker']['implementation']
+
+    # running via mozharness assumes desktop-build (which contains build.sh)
+    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}
+
+    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
+    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
+
+    docker_worker_add_public_artifacts(config, job, taskdesc)
+    docker_worker_add_workspace_cache(config, job, taskdesc)
+    docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+    env = worker.setdefault('env', {})
+    env.update({
+        'MOZHARNESS_CONFIG': ' '.join(run['config']),
+        'MOZHARNESS_SCRIPT': run['script'],
+        'MH_BRANCH': config.params['project'],
+        'MH_BUILD_POOL': 'taskcluster',
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+    })
+
+    if 'actions' in run:
+        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
+
+    if 'options' in run:
+        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
+
+    if 'custom-build-variant-cfg' in run:
+        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']
+
+    if 'job-script' in run:
+        env['JOB_SCRIPT'] = run['job-script']
+
+    # if we're not keeping artifacts, set some env variables to empty values
+    # that will cause the build process to skip copying the results to the
+    # artifacts directory.  This will have no effect for operations that are
+    # not builds.
+    if not run['keep-artifacts']:
+        env['DIST_TARGET_UPLOADS'] = ''
+        env['DIST_UPLOADS'] = ''
+
+    # Xvfb
+    if run['need-xvfb']:
+        env['NEED_XVFB'] = 'true'
+
+    # tooltool downloads
+    if run['tooltool-downloads']:
+        worker['relengapi-proxy'] = True
+        worker['caches'].append({
+            'type': 'persistent',
+            'name': 'tooltool-cache',
+            'mount-point': '/home/worker/tooltool-cache',
+        })
+        taskdesc['scopes'].extend([
+            'docker-worker:relengapi-proxy:tooltool.download.public',
+        ])
+        if run['tooltool-downloads'] == 'internal':
+            taskdesc['scopes'].append(
+                'docker-worker:relengapi-proxy:tooltool.download.internal')
+        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+        env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+        env['TOOLTOOL_REV'] = 'master'
+
+    # Retry if mozharness returns TBPL_RETRY
+    worker['retry-exit-status'] = 4
+
+    docker_worker_setup_secrets(config, job, taskdesc)
+
+    command = [
+        '/home/worker/bin/run-task',
+        # Various caches/volumes are default owned by root:root.
+        '--chown-recursive', '/home/worker/workspace',
+        '--chown-recursive', '/home/worker/tooltool-cache',
+        '--vcs-checkout', '/home/worker/workspace/build/src',
+        '--tools-checkout', '/home/worker/workspace/build/tools',
+        '--',
+    ]
+    command.append("/home/worker/workspace/build/src/{}".format(
+        run.get('job-script',
+                "taskcluster/scripts/builder/build-linux.sh"
+                )))
+
+    worker['command'] = command
+
+
+# We use the generic worker to run tasks on Windows
+@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
+def mozharness_on_windows(config, job, taskdesc):
+    run = job['run']
+
+    # fail if invalid run options are included
+    invalid = []
+    for prop in ['actions', 'custom-build-variant-cfg',
+                 'tooltool-downloads', 'secrets', 'taskcluster-proxy',
+                 'need-xvfb']:
+        if prop in run and run[prop]:
+            invalid.append(prop)
+    if not run.get('keep-artifacts', True):
+        invalid.append('keep-artifacts')
+    if invalid:
+        raise Exception("Jobs run using mozharness on Windows do not support properties " +
+                        ', '.join(invalid))
+
+    worker = taskdesc['worker']
+
+    worker['artifacts'] = [{
+        'path': r'public\build',
+        'type': 'directory',
+    }]
+
+    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+    env = worker['env']
+    env.update({
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+        'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+        'TOOLTOOL_REV': 'master',
+    })
+
+    mh_command = [r'c:\mozilla-build\python\python.exe']
+    mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
+    for cfg in run['config']:
+        mh_command.append('--config ' + cfg.replace('/', '\\'))
+    mh_command.append('--branch ' + config.params['project'])
+    mh_command.append(r'--skip-buildbot-actions --work-dir %cd:Z:=z:%\build')
+    for option in run.get('options', []):
+        mh_command.append('--' + option)
+
+    hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
+    hg_command.append('robustcheckout')
+    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+    hg_command.append('--purge')
+    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+    hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
+    hg_command.append(env['GECKO_HEAD_REPOSITORY'])
+    hg_command.append('.\\build\\src')
+
+    worker['command'] = [
+        ' '.join(hg_command),
+        ' '.join(mh_command)
+    ]
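For illustration only (not part of this commit): a hypothetical 'run' section for a docker-worker build handled by the mozharness transform above. The script and config names are plausible examples, not values taken from this commit.

# Hypothetical 'run' section for a mozharness build job; values are assumptions.
job_run = {
    'using': 'mozharness',
    'script': 'mozharness/scripts/fx_desktop_build.py',
    'config': ['builds/releng_base_linux_64_builds.py'],
    'secrets': True,                 # expands to scopes under project/releng/gecko/...
    'tooltool-downloads': 'public',  # enables the relengapi proxy and tooltool cache
    'need-xvfb': True,
}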
diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
new file mode 100644
index 000000000..296fe43ee
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/run_task.py
@@ -0,0 +1,59 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running jobs that are invoked via the `run-task` script.
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+import copy
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_support_vcs_checkout,
+)
+from voluptuous import Schema, Required, Any
+
+run_task_schema = Schema({
+    Required('using'): 'run-task',
+
+    # if true, add a cache at ~worker/.cache, which is where things like pip
+    # tend to hide their caches.  This cache is never added for level-1 jobs.
+    Required('cache-dotcache', default=False): bool,
+
+    # if true (the default), perform a checkout in /home/worker/checkouts/gecko
+    Required('checkout', default=True): bool,
+
+    # The command arguments to pass to the `run-task` script, after the
+    # checkout arguments.  If a list, it will be passed directly; otherwise
+    # it will be included in a single argument to `bash -cx`.
+    Required('command'): Any([basestring], basestring),
+})
+
+
+@run_job_using("docker-worker", "run-task", schema=run_task_schema)
+def docker_worker_run_task(config, job, taskdesc):
+    run = job['run']
+
+    worker = taskdesc['worker'] = copy.deepcopy(job['worker'])
+
+    if run['checkout']:
+        docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+    if run.get('cache-dotcache') and int(config.params['level']) > 1:
+        worker['caches'].append({
+            'type': 'persistent',
+            'name': 'level-{level}-{project}-dotcache'.format(**config.params),
+            'mount-point': '/home/worker/.cache',
+        })
+
+    run_command = run['command']
+    if isinstance(run_command, basestring):
+        run_command = ['bash', '-cx', run_command]
+    command = ['/home/worker/bin/run-task']
+    if run['checkout']:
+        command.append('--vcs-checkout=/home/worker/checkouts/gecko')
+    command.append('--')
+    command.extend(run_command)
+    worker['command'] = command
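For illustration only (not part of this commit): a hypothetical 'run' section for the run-task transform above, together with the worker command it would produce according to the code; the lint command itself is invented.

# Hypothetical 'run' section for a run-task job; the command is an assumption.
job_run = {
    'using': 'run-task',
    'cache-dotcache': True,
    'checkout': True,
    'command': 'cd /home/worker/checkouts/gecko && ./mach lint',
}

# Resulting worker['command'] (string commands are wrapped in `bash -cx`):
# ['/home/worker/bin/run-task',
#  '--vcs-checkout=/home/worker/checkouts/gecko',
#  '--',
#  'bash', '-cx', 'cd /home/worker/checkouts/gecko && ./mach lint']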
diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py
new file mode 100644
index 000000000..d78b78504
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/spidermonkey.py
@@ -0,0 +1,86 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running spidermonkey jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required, Optional, Any
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_add_public_artifacts,
+    docker_worker_support_vcs_checkout,
+)
+
+sm_run_schema = Schema({
+    Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate'),
+
+    # The SPIDERMONKEY_VARIANT
+    Required('spidermonkey-variant'): basestring,
+
+    # The tooltool manifest to use; default from sm-tooltool-config.sh is used
+    # if omitted
+    Optional('tooltool-manifest'): basestring,
+})
+
+
+@run_job_using("docker-worker", "spidermonkey")
+@run_job_using("docker-worker", "spidermonkey-package")
+@run_job_using("docker-worker", "spidermonkey-mozjs-crate")
+def docker_worker_spidermonkey(config, job, taskdesc, schema=sm_run_schema):
+    run = job['run']
+
+    worker = taskdesc['worker']
+    worker['artifacts'] = []
+    worker['caches'] = []
+
+    if int(config.params['level']) > 1:
+        worker['caches'].append({
+            'type': 'persistent',
+            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
+                config.params['level'], config.params['project']),
+            'mount-point': "/home/worker/workspace",
+        })
+
+    docker_worker_add_public_artifacts(config, job, taskdesc)
+
+    env = worker['env']
+    env.update({
+        'MOZHARNESS_DISABLE': 'true',
+        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+    })
+
+    # tooltool downloads; note that this script downloads using the API
+    # endpoint directly, rather than via relengapi-proxy
+    worker['caches'].append({
+        'type': 'persistent',
+        'name': 'tooltool-cache',
+        'mount-point': '/home/worker/tooltool-cache',
+    })
+    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    if run.get('tooltool-manifest'):
+        env['TOOLTOOL_MANIFEST'] = run['tooltool-manifest']
+
+    docker_worker_support_vcs_checkout(config, job, taskdesc)
+
+    script = "build-sm.sh"
+    if run['using'] == 'spidermonkey-package':
+        script = "build-sm-package.sh"
+    elif run['using'] == 'spidermonkey-mozjs-crate':
+        script = "build-sm-mozjs-crate.sh"
+
+    worker['command'] = [
+        '/home/worker/bin/run-task',
+        '--chown-recursive', '/home/worker/workspace',
+        '--chown-recursive', '/home/worker/tooltool-cache',
+        '--vcs-checkout', '/home/worker/workspace/build/src',
+        '--',
+        '/bin/bash',
+        '-c',
+        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
+    ]
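For illustration only (not part of this commit): a hypothetical 'run' section for the spidermonkey transform above; the variant name is an assumption.

# Hypothetical 'run' section for a spidermonkey job; the variant is an assumption.
job_run = {
    'using': 'spidermonkey',            # or 'spidermonkey-package' / 'spidermonkey-mozjs-crate'
    'spidermonkey-variant': 'plain',    # exported to the build script as SPIDERMONKEY_VARIANT
}

# 'using' selects the build script under taskcluster/scripts/builder/:
# build-sm.sh, build-sm-package.sh or build-sm-mozjs-crate.sh.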
diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py
new file mode 100644
index 000000000..d814f7824
--- /dev/null
+++ b/taskcluster/taskgraph/transforms/job/toolchain.py
@@ -0,0 +1,115 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+"""
+Support for running toolchain-building jobs via dedicated scripts
+"""
+
+from __future__ import absolute_import, print_function, unicode_literals
+
+from voluptuous import Schema, Required
+
+from taskgraph.transforms.job import run_job_using
+from taskgraph.transforms.job.common import (
+    docker_worker_add_tc_vcs_cache,
+    docker_worker_add_gecko_vcs_env_vars
+)
+
+toolchain_run_schema = Schema({
+    Required('using'): 'toolchain-script',
+
+    # the script (in taskcluster/scripts/misc) to run
+    Required('script'): basestring,
+})
+
+
+@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
+def docker_worker_toolchain(config, job, taskdesc):
+    run = job['run']
+
+    worker = taskdesc['worker']
+    worker['artifacts'] = []
+    worker['caches'] = []
+
+    worker['artifacts'].append({
+        'name': 'public',
+        'path': '/home/worker/workspace/artifacts/',
+        'type': 'directory',
+    })
+
+    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
+    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+    env = worker['env']
+    env.update({
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+        'TOOLS_DISABLE': 'true',
+    })
+
+    # tooltool downloads; note that this downloads using the API endpoint directly,
+    # rather than via relengapi-proxy
+    worker['caches'].append({
+        'type': 'persistent',
+        'name': 'tooltool-cache',
+        'mount-point': '/home/worker/tooltool-cache',
+    })
+    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
+    env['TOOLTOOL_REPO'] = 'https://github.com/mozilla/build-tooltool'
+    env['TOOLTOOL_REV'] = 'master'
+
+    command = ' && '.join([
+        "cd /home/worker/",
+        "./bin/checkout-sources.sh",
+        "./workspace/build/src/taskcluster/scripts/misc/" + run['script'],
+    ])
+    worker['command'] = ["/bin/bash", "-c", command]
+
+
+@run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
+def windows_toolchain(config, job, taskdesc):
+    run = job['run']
+
+    worker = taskdesc['worker']
+
+    worker['artifacts'] = [{
+        'path': r'public\build',
+        'type': 'directory',
+    }]
+
+    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
+
+    # We fetch LLVM SVN into this.
+    svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
+    worker['mounts'] = [{
+        'cache-name': svn_cache,
+        'path': r'llvm-sources',
+    }]
+    taskdesc['scopes'].extend([
+        'generic-worker:cache:' + svn_cache,
+    ])
+
+    env = worker['env']
+    env.update({
+        'MOZ_BUILD_DATE': config.params['moz_build_date'],
+        'MOZ_SCM_LEVEL': config.params['level'],
+        'TOOLTOOL_REPO': 'https://github.com/mozilla/build-tooltool',
+        'TOOLTOOL_REV': 'master',
+    })
+
+    hg = r'c:\Program Files\Mercurial\hg.exe'
+    hg_command = ['"{}"'.format(hg)]
+    hg_command.append('robustcheckout')
+    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
+    hg_command.append('--purge')
+    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
+    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
+    hg_command.append('%GECKO_HEAD_REPOSITORY%')
+    hg_command.append('.\\build\\src')
+
+    bash = r'c:\mozilla-build\msys\bin\bash'
+    worker['command'] = [
+        ' '.join(hg_command),
+        # do something intelligent.
+        r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
+    ]
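For illustration only (not part of this commit): a hypothetical 'run' section for the toolchain-script transform above; the script name is an assumption.

# Hypothetical 'run' section for a toolchain job; the script name is an assumption.
job_run = {
    'using': 'toolchain-script',
    'script': 'build-clang-linux.sh',   # expected under taskcluster/scripts/misc/
}

# On docker-worker the transform joins the steps into one bash command:
#   cd /home/worker/ && ./bin/checkout-sources.sh &&
#   ./workspace/build/src/taskcluster/scripts/misc/build-clang-linux.sh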