path: root/testing/docker/rust-build/tcbuild.py
#!/usr/bin/env python
'''
This script triggers a taskcluster task, waits for it to finish,
fetches the artifacts, uploads them to tooltool, and updates
the in-tree tooltool manifests.
'''
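
# Example invocation (the file names below are illustrative placeholders, not
# files that ship with this script):
#
#   python tcbuild.py taskcluster-auth.json \
#       --tooltool-auth tooltool-token \
#       --local-gecko-clone /path/to/mozilla-central \
#       --rust-branch stable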

from __future__ import print_function

import requests
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()

import argparse
import datetime
import json
import os
import shutil
import sys
import taskcluster
import tempfile
import time
import tooltool

def local_file(filename):
    '''
    Return a path to a file next to this script.
    '''
    return os.path.join(os.path.dirname(__file__), filename)

def read_tc_auth(tc_auth_file):
    '''
    Read taskcluster credentials from tc_auth_file and return them as a dict.
    '''
    with open(tc_auth_file, 'rb') as f:
        return json.load(f)
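
# The credentials file should contain a JSON object of the form shown in the
# --help text for the taskcluster_auth argument:
#
#   {"clientId": "...", "accessToken": "..."}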

def fill_template_dict(d, keys):
    for key, val in d.items():
        if isinstance(val, basestring) and '{' in val:
            d[key] = val.format(**keys)
        elif isinstance(val, dict):
            fill_template_dict(val, keys)

def fill_template(template_file, keys):
    '''
    Take the file object template_file, parse it as JSON, and
    interpolate (using str.format) its string values using keys.
    '''
    template = json.load(template_file)
    fill_template_dict(template, keys)
    return template
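
# For illustration only (the real placeholders live in task.json; this
# particular key/value pair is an assumption, not taken from that file):
#
#   fill_template_dict({'env': {'RUST_BRANCH': '{rust_branch}'}},
#                      {'rust_branch': 'stable'})
#   # leaves the dict as {'env': {'RUST_BRANCH': 'stable'}}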

def spawn_task(queue, args):
    '''
    Spawn a Taskcluster task in queue using args.
    '''
    task_id = taskcluster.utils.slugId()
    with open(local_file('task.json'), 'rb') as template:
        keys = vars(args)
        now = datetime.datetime.utcnow()
        keys['task_created'] = now.isoformat() + 'Z'
        keys['task_deadline'] = (now + datetime.timedelta(hours=2)).isoformat() + 'Z'
        keys['artifacts_expires'] = (now + datetime.timedelta(days=1)).isoformat() + 'Z'
        payload = fill_template(template, keys)
    queue.createTask(task_id, payload)
    print('--- %s task %s submitted ---' % (now, task_id))
    return task_id

def wait_for_task(queue, task_id, initial_wait=5):
    '''
    Wait until queue reports that task task_id is completed, and return
    its run id.

    Sleep for initial_wait seconds before checking status the first time.
    Then poll periodically and print a running log of the task status.
    '''
    time.sleep(initial_wait)
    previous_state = None
    have_ticks = False
    while True:
        res = queue.status(task_id)
        state = res['status']['state']
        if state != previous_state:
            now = datetime.datetime.utcnow()
            if have_ticks:
                sys.stdout.write('\n')
                have_ticks = False
            print('--- %s task %s %s ---' % (now, task_id, state))
            previous_state = state
        if state == 'completed':
            return len(res['status']['runs']) - 1
        if state in ('failed', 'exception'):
            raise Exception('Task failed')
        sys.stdout.write('.')
        sys.stdout.flush()
        have_ticks = True
        time.sleep(10)

def fetch_artifact(queue, task_id, run_id, name, dest_dir):
    '''
    Fetch the artifact with name from task_id and run_id in queue,
    write it to a file in dest_dir, and return the path to the written
    file.
    '''
    url = queue.buildUrl('getArtifact', task_id, run_id, name)
    fn = os.path.join(dest_dir, os.path.basename(name))
    print('Fetching %s...' % name)
    try:
        r = requests.get(url, stream=True)
        r.raise_for_status()
        with open(fn, 'wb') as f:
            for chunk in r.iter_content(1024):
                f.write(chunk)
    except requests.exceptions.HTTPError:
        print('HTTP Error %d fetching %s' % (r.status_code, name))
        return None
    return fn

def make_artifact_dir(task_id, run_id):
    '''
    Create a temporary directory in which to download artifacts from
    run run_id of task task_id, and return its path.
    '''
    prefix = 'tc-artifacts.%s.%d.' % (task_id, run_id)
    print('making artifact dir with prefix %s' % prefix)
    return tempfile.mkdtemp(prefix=prefix)

def fetch_artifacts(queue, task_id, run_id):
    '''
    Fetch all artifacts from task_id and run_id in queue, write them to
    temporary files, and yield the path to each.
    '''
    tempdir = make_artifact_dir(task_id, run_id)
    try:
        res = queue.listArtifacts(task_id, run_id)
        for a in res['artifacts']:
            # Skip logs
            if a['name'].startswith('public/logs'):
                continue
            # Skip docker-worker interfaces
            if a['name'].startswith('private/docker-worker'):
                continue
            yield fetch_artifact(queue, task_id, run_id, a['name'], tempdir)
    finally:
        if os.path.isdir(tempdir):
            # Leave the temporary directory in place so the downloaded
            # artifacts can be inspected after the script exits.
            print('Artifacts downloaded to %s' % tempdir)

def upload_to_tooltool(tooltool_auth, task_id, artifact):
    '''
    Upload artifact to tooltool using tooltool_auth as the authentication token.
    Return the path to the generated tooltool manifest.
    '''
    oldcwd = os.getcwd()
    try:
        os.chdir(os.path.dirname(artifact))
        manifest = artifact + '.manifest'
        tooltool.main([
            'tooltool.py',
            'add',
            '--visibility=public',
            '-m', manifest,
            artifact
        ])
        tooltool.main([
            'tooltool.py',
            'upload',
            '-m', manifest,
            '--authentication-file', tooltool_auth,
            '--message', 'Built from taskcluster task {}'.format(task_id),
        ])
        return manifest
    finally:
        os.chdir(oldcwd)

def update_manifest(artifact, manifest, local_gecko_clone):
    '''
    Copy the tooltool manifest generated for artifact over the in-tree
    releng.manifest for the matching platform in local_gecko_clone.
    '''
    platform = 'linux'
    manifest_dir = os.path.join(local_gecko_clone,
                                'testing', 'config', 'tooltool-manifests')
    platform_dir = [p for p in os.listdir(manifest_dir)
                    if p.startswith(platform)][0]
    tree_manifest = os.path.join(manifest_dir, platform_dir, 'releng.manifest')
    print('%s -> %s' % (manifest, tree_manifest))
    shutil.copyfile(manifest, tree_manifest)

def main():
    parser = argparse.ArgumentParser(description='Build and upload binaries')
    parser.add_argument('taskcluster_auth',
                        help='Path to a JSON file containing Taskcluster credentials in the form {"clientId": "...", "accessToken": "..."}')
    parser.add_argument('--tooltool-auth',
                        help='Path to a file containing a tooltool authentication token valid for uploading files')
    parser.add_argument('--local-gecko-clone',
                        help='Path to a local Gecko clone whose tooltool manifests will be updated with the newly-built binaries')
    parser.add_argument('--rust-branch', default='stable',
                        help='Revision of the rust repository to use')
    parser.add_argument('--task', help='Use an existing task')

    args = parser.parse_args()
    tc_auth = read_tc_auth(args.taskcluster_auth)
    queue = taskcluster.Queue({'credentials': tc_auth})
    if args.task:
        task_id, initial_wait = args.task, 0
    else:
        task_id, initial_wait = spawn_task(queue, args), 25
    run_id = wait_for_task(queue, task_id, initial_wait)
    for artifact in fetch_artifacts(queue, task_id, run_id):
        # fetch_artifact returns None for downloads that failed; skip those.
        if artifact is None:
            continue
        if args.tooltool_auth:
            manifest = upload_to_tooltool(args.tooltool_auth, task_id, artifact)
            # Updating the in-tree manifest needs the tooltool manifest
            # generated by the upload step.
            if args.local_gecko_clone:
                update_manifest(artifact, manifest, args.local_gecko_clone)

if __name__ == '__main__':
    main()