author     Matt A. Tobin <mattatobin@localhost.localdomain>  2018-02-02 04:16:08 -0500
committer  Matt A. Tobin <mattatobin@localhost.localdomain>  2018-02-02 04:16:08 -0500
commit     5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree       10027f336435511475e392454359edea8e25895d /testing/mozharness/scripts/release
parent     49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'testing/mozharness/scripts/release')
-rw-r--r--  testing/mozharness/scripts/release/antivirus.py                     193
-rwxr-xr-x  testing/mozharness/scripts/release/beet_mover.py                    372
-rw-r--r--  testing/mozharness/scripts/release/generate-checksums.py            284
-rw-r--r--  testing/mozharness/scripts/release/postrelease_bouncer_aliases.py   107
-rw-r--r--  testing/mozharness/scripts/release/postrelease_mark_as_shipped.py   110
-rw-r--r--  testing/mozharness/scripts/release/postrelease_version_bump.py      184
-rw-r--r--  testing/mozharness/scripts/release/publish_balrog.py                119
-rw-r--r--  testing/mozharness/scripts/release/push-candidate-to-releases.py    200
-rw-r--r--  testing/mozharness/scripts/release/updates.py                       299
-rw-r--r--  testing/mozharness/scripts/release/uptake_monitoring.py             188
10 files changed, 2056 insertions, 0 deletions
diff --git a/testing/mozharness/scripts/release/antivirus.py b/testing/mozharness/scripts/release/antivirus.py
new file mode 100644
index 000000000..b40dc5cc0
--- /dev/null
+++ b/testing/mozharness/scripts/release/antivirus.py
@@ -0,0 +1,193 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+import shutil
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+
+
+class AntivirusScan(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "S3 Bucket to retrieve files from",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-d", "--download-parallelization"], {
+ "dest": "download_parallelization",
+ "default": 6,
+ "type": "int",
+ "help": "Number of concurrent file downloads",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--tools-revision"], {
+ "dest": "tools_revision",
+ "help": "Revision of tools repo to use when downloading extract_and_run_command.py",
+ }],
+ ] + virtualenv_config_options
+
+ DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+ ]
+ CACHE_DIR = 'cache'
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ )
+ self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
+ self.dest_dir = self.CACHE_DIR
+
+ def _get_candidates_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.excludes:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def get_extract_script(self):
+ """Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
+ so that we can unpack various files for clam to scan them."""
+ remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
+ self.config["tools_revision"])
+ self.download_file(remote_file, file_name="extract_and_run_command.py")
+
+ def get_files(self):
+ """Pull the candidate files down from S3 for scanning, using parallel requests"""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+ from httplib import HTTPException
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(anon=True)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ if os.path.exists(self.dest_dir):
+ self.info('Emptying {}'.format(self.dest_dir))
+ shutil.rmtree(self.dest_dir)
+ os.makedirs(self.dest_dir)
+
+ def worker(item):
+ source, destination = item
+
+ self.info("Downloading {} to {}".format(source, destination))
+ key = bucket.get_key(source)
+ return retry(key.get_contents_to_filename,
+ args=(destination, ),
+ sleeptime=30, max_sleeptime=150,
+ retry_exceptions=(S3CopyError, S3ResponseError,
+ IOError, HTTPException))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
+ dest_dir = os.path.dirname(destination)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["download_parallelization"])
+ pool.map(worker, find_release_files())
+
+ def scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ self.run_command([self.query_python_path(), 'extract_and_run_command.py',
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamdscan', '-m', '--no-summary', '--', self.dest_dir])
+
+ def cleanup_cache(self):
+ """If we have simultaneous releases in flight an av slave may end up doing another
+ av job before being recycled, and we need to make sure the full disk is available."""
+ shutil.rmtree(self.dest_dir)
+
+
+if __name__ == "__main__":
+ myScript = AntivirusScan()
+ myScript.run_and_exit()
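
The exclusion logic above is a plain regex search over each S3 key name. A minimal standalone sketch of the same filtering, assuming a trimmed-down pattern list and made-up key names:

    import re

    # a few of the patterns from AntivirusScan.DEFAULT_EXCLUDES
    EXCLUDES = [r"^.*tests.*$", r"^.*\.log$", r"^.*/mar-tools/.*$"]

    def matches_exclude(keyname):
        return any(re.search(exclude, keyname) for exclude in EXCLUDES)

    # hypothetical key names, for illustration only
    for key in ["linux-x86_64/en-US/firefox-39.0b5.tar.bz2",
                "linux-x86_64/en-US/firefox-39.0b5.tests.zip",
                "logs/build.log"]:
        print("{} -> {}".format(key, "excluded" if matches_exclude(key) else "scanned"))
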
diff --git a/testing/mozharness/scripts/release/beet_mover.py b/testing/mozharness/scripts/release/beet_mover.py
new file mode 100755
index 000000000..adc8b19e1
--- /dev/null
+++ b/testing/mozharness/scripts/release/beet_mover.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""beet_mover.py.
+
+downloads artifacts, scans them and uploads them to s3
+"""
+import hashlib
+import sys
+import os
+import pprint
+import re
+from os import listdir
+from os.path import isfile, join
+import sh
+import redo
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.log import FATAL
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+import mozharness
+import mimetypes
+
+
+def get_hash(content, hash_type="md5"):
+ h = hashlib.new(hash_type)
+ h.update(content)
+ return h.hexdigest()
+
+
+CONFIG_OPTIONS = [
+ [["--template"], {
+ "dest": "template",
+ "help": "Specify jinja2 template file",
+ }],
+ [['--locale', ], {
+ "action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to upload."}],
+ [["--platform"], {
+ "dest": "platform",
+ "help": "Specify the platform of the build",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "full release version based on gecko and tag/stage identifier. e.g. '44.0b1'"
+ }],
+ [["--app-version"], {
+ "dest": "app_version",
+ "help": "numbered version based on gecko. e.g. '44.0'"
+ }],
+ [["--partial-version"], {
+ "dest": "partial_version",
+ "help": "the partial version the mar is based off of"
+ }],
+ [["--artifact-subdir"], {
+ "dest": "artifact_subdir",
+ "default": 'build',
+ "help": "subdir location for taskcluster artifacts after public/ base.",
+ }],
+ [["--build-num"], {
+ "dest": "build_num",
+ "help": "the release build identifier"
+ }],
+ [["--taskid"], {
+ "dest": "taskid",
+ "help": "taskcluster task id to download artifacts from",
+ }],
+ [["--bucket"], {
+ "dest": "bucket",
+ "help": "s3 bucket to move beets to.",
+ }],
+ [["--product"], {
+ "dest": "product",
+ "help": "product for which artifacts are beetmoved",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+]
+
+DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+]
+CACHE_DIR = 'cache'
+
+MIME_MAP = {
+ '': 'text/plain',
+ '.asc': 'text/plain',
+ '.beet': 'text/plain',
+ '.bundle': 'application/octet-stream',
+ '.bz2': 'application/octet-stream',
+ '.checksums': 'text/plain',
+ '.dmg': 'application/x-iso9660-image',
+ '.mar': 'application/octet-stream',
+ '.xpi': 'application/x-xpinstall'
+}
+
+HASH_FORMATS = ["sha512", "sha256"]
+
+
+class BeetMover(BaseScript, VirtualenvMixin, object):
+ def __init__(self, aws_creds):
+ beetmover_kwargs = {
+ 'config_options': CONFIG_OPTIONS,
+ 'all_actions': [
+ # 'clobber',
+ 'create-virtualenv',
+ 'activate-virtualenv',
+ 'generate-candidates-manifest',
+ 'refresh-antivirus',
+ 'verify-bits', # beets
+ 'download-bits', # beets
+ 'scan-bits', # beets
+ 'upload-bits', # beets
+ ],
+ 'require_config_file': False,
+ # Default configuration
+ 'config': {
+ # base index url where to find taskcluster artifact based on taskid
+ "artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
+ "virtualenv_modules": [
+ "boto",
+ "PyYAML",
+ "Jinja2",
+ "redo",
+ "cryptography==2.0.3",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ }
+ # TODO: do excludes need to be configured via command line for specific builds?
+ super(BeetMover, self).__init__(**beetmover_kwargs)
+
+ c = self.config
+ self.manifest = {}
+ # assigned in _post_create_virtualenv
+ self.virtualenv_imports = None
+ self.bucket = c['bucket']
+ if not all(aws_creds):
+ self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ # if excludes is set from command line, use it otherwise use defaults
+ self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
+ dirs = self.query_abs_dirs()
+ self.dest_dir = os.path.join(dirs['abs_work_dir'], CACHE_DIR)
+ self.mime_fix()
+
+ def activate_virtualenv(self):
+ """
+ activates the virtualenv and adds module imports to an instance-wide namespace.
+
+ creating and activating a virtualenv in the currently executing python interpreter is a
+ bit of black magic. Rather than scattering import statements in various places within the
+ script, we import them here, immediately after we activate the newly created virtualenv.
+ """
+ VirtualenvMixin.activate_virtualenv(self)
+
+ import boto
+ import yaml
+ import jinja2
+ self.virtualenv_imports = {
+ 'boto': boto,
+ 'yaml': yaml,
+ 'jinja2': jinja2,
+ }
+ self.log("activated virtualenv with the modules: {}".format(str(self.virtualenv_imports)))
+
+ def _get_template_vars(self):
+ return {
+ "platform": self.config['platform'],
+ "locales": self.config.get('locales'),
+ "version": self.config['version'],
+ "app_version": self.config.get('app_version', ''),
+ "partial_version": self.config.get('partial_version', ''),
+ "build_num": self.config['build_num'],
+ # keep the trailing slash
+ "s3_prefix": 'pub/{prod}/candidates/{ver}-candidates/{n}/'.format(
+ prod=self.config['product'], ver=self.config['version'],
+ n=self.config['build_num']
+ ),
+ "artifact_base_url": self.config['artifact_base_url'].format(
+ taskid=self.config['taskid'], subdir=self.config['artifact_subdir']
+ )
+ }
+
+ def generate_candidates_manifest(self):
+ """
+ generates and outputs a manifest that maps expected Taskcluster artifact names
+ to release deliverable names
+ """
+ self.log('generating manifest from {}...'.format(self.config['template']))
+ template_dir, template_file = os.path.split(os.path.abspath(self.config['template']))
+ jinja2 = self.virtualenv_imports['jinja2']
+ yaml = self.virtualenv_imports['yaml']
+
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ undefined=jinja2.StrictUndefined)
+ template = jinja_env.get_template(template_file)
+ self.manifest = yaml.safe_load(template.render(**self._get_template_vars()))
+
+ self.log("manifest generated:")
+ self.log(pprint.pformat(self.manifest['mapping']))
+
+ def verify_bits(self):
+ """
+ inspects each artifact and verifies that it was created by a trustworthy task
+ """
+ # TODO
+ self.log('skipping verification. unimplemented...')
+
+ def refresh_antivirus(self):
+ self.info("Refreshing clamav db...")
+ try:
+ redo.retry(lambda:
+ sh.freshclam("--stdout", "--verbose", _timeout=300,
+ _err_to_out=True))
+ self.info("Done.")
+ except sh.ErrorReturnCode:
+ self.warning("Freshclam failed, skipping DB update")
+
+ def download_bits(self):
+ """
+ downloads the list of artifacts named in the manifest into the work dir
+ """
+ self.log('downloading artifacts to the work dir...')
+ dirs = self.query_abs_dirs()
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ self.retry(
+ self.download_file,
+ args=[source],
+ kwargs={'parent_dir': dirs['abs_work_dir']},
+ error_level=FATAL)
+ self.log('Success!')
+
+ def _strip_prefix(self, s3_key):
+ """Return file name relative to prefix"""
+ # "abc/def/hfg".split("abc/de")[-1] == "f/hfg"
+ return s3_key.split(self._get_template_vars()["s3_prefix"])[-1]
+
+ def upload_bits(self):
+ """
+ uploads list of artifacts to s3 candidates dir based on a given manifest
+ """
+ self.log('uploading artifacts to s3...')
+ dirs = self.query_abs_dirs()
+
+ # connect to s3
+ boto = self.virtualenv_imports['boto']
+ conn = boto.connect_s3(self.aws_key_id, self.aws_secret_key)
+ bucket = conn.get_bucket(self.bucket)
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("uploading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ # we have already downloaded the files locally so we can use that version
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
+ downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
+ # generate checksums for every uploaded file
+ beet_file_name = '{}.beet'.format(downloaded_file)
+ # upload checksums to a separate subdirectory
+ beet_dest = '{prefix}beetmover-checksums/{f}.beet'.format(
+ prefix=self._get_template_vars()["s3_prefix"],
+ f=self._strip_prefix(s3_key)
+ )
+ beet_contents = '\n'.join([
+ '{hash} {fmt} {size} {name}'.format(
+ hash=self.get_hash_for_file(downloaded_file, hash_type=fmt),
+ fmt=fmt,
+ size=os.path.getsize(downloaded_file),
+ name=self._strip_prefix(s3_key)) for fmt in HASH_FORMATS
+ ])
+ self.write_to_file(beet_file_name, beet_contents)
+ self.upload_bit(source=downloaded_file, s3_key=s3_key,
+ bucket=bucket)
+ self.upload_bit(source=beet_file_name, s3_key=beet_dest,
+ bucket=bucket)
+ self.log('Success!')
+
+
+ def upload_bit(self, source, s3_key, bucket):
+ boto = self.virtualenv_imports['boto']
+ self.info('uploading to s3 with key: {}'.format(s3_key))
+ key = boto.s3.key.Key(bucket) # create new key
+ key.key = s3_key # set key name
+
+ self.info("Checking if `{}` already exists".format(s3_key))
+ key = bucket.get_key(s3_key)
+ if not key:
+ self.info("Uploading to `{}`".format(s3_key))
+ key = bucket.new_key(s3_key)
+ # set key value
+ mime_type, _ = mimetypes.guess_type(source)
+ self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
+ error_level=FATAL),
+ else:
+ if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
+ # for now, let's halt. If necessary, we can revisit this and allow for overwrites
+ # to the same buildnum release with different bits
+ self.fatal("`{}` already exists with different checksum.".format(s3_key))
+ self.log("`{}` has the same MD5 checksum, not uploading".format(s3_key))
+
+ def scan_bits(self):
+
+ dirs = self.query_abs_dirs()
+
+ filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
+ self.mkdir_p(self.dest_dir)
+ for file_name in filenames:
+ if self._matches_exclude(file_name):
+ self.info("Excluding {} from virus scan".format(file_name))
+ else:
+ self.info('Copying {} to {}'.format(file_name,self.dest_dir))
+ self.copyfile(os.path.join(dirs['abs_work_dir'], file_name), os.path.join(self.dest_dir,file_name))
+ self._scan_files()
+ self.info('Emptying {}'.format(self.dest_dir))
+ self.rmtree(self.dest_dir)
+
+ def _scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
+ self.run_command([self.query_python_path(), os.path.join(external_tools_path,'extract_and_run_command.py'),
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamscan', '--no-summary', '--', self.dest_dir])
+
+ def _matches_exclude(self, keyname):
+ return any(re.search(exclude, keyname) for exclude in self.excludes)
+
+ def mime_fix(self):
+ """ Add mimetypes for custom extensions """
+ mimetypes.init()
+ map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
+
+if __name__ == '__main__':
+ beet_mover = BeetMover(pop_aws_auth_from_env())
+ beet_mover.run_and_exit()
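
The .beet files written by upload_bits are plain text, one "<hash> <format> <size> <name>" line per hash format. A hedged sketch of producing the same contents for a local artifact (the file name and S3-relative name below are made up):

    import hashlib
    import os

    HASH_FORMATS = ["sha512", "sha256"]

    def get_hash_for_file(path, hash_type):
        h = hashlib.new(hash_type)
        with open(path, "rb") as f:
            h.update(f.read())
        return h.hexdigest()

    def beet_contents(path, name):
        # one "<hash> <format> <size> <name>" line per hash format
        return "\n".join(
            "{} {} {} {}".format(get_hash_for_file(path, fmt), fmt,
                                 os.path.getsize(path), name)
            for fmt in HASH_FORMATS)

    # e.g. beet_contents("firefox-44.0b1.tar.bz2",
    #                    "linux-x86_64/en-US/firefox-44.0b1.tar.bz2")
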
diff --git a/testing/mozharness/scripts/release/generate-checksums.py b/testing/mozharness/scripts/release/generate-checksums.py
new file mode 100644
index 000000000..61a1c43d2
--- /dev/null
+++ b/testing/mozharness/scripts/release/generate-checksums.py
@@ -0,0 +1,284 @@
+from multiprocessing.pool import ThreadPool
+import os
+from os import path
+import re
+import sys
+import posixpath
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.checksums import parse_checksums_file
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
+ config_options = [
+ [["--stage-product"], {
+ "dest": "stage_product",
+ "help": "Name of product used in file server's directory structure, eg: firefox, mobile",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name-prefix"], {
+ "dest": "bucket_name_prefix",
+ "help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
+ }],
+ [["--bucket-name-full"], {
+ "dest": "bucket_name_full",
+ "help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": int,
+ "help": "Number of checksums file to download concurrently",
+ }],
+ [["-f", "--format"], {
+ "dest": "formats",
+ "default": [],
+ "action": "append",
+ "help": "Format(s) to generate big checksums file for. Default: sha512",
+ }],
+ [["--include"], {
+ "dest": "includes",
+ "default": [],
+ "action": "append",
+ "help": "List of patterns to include in big checksums file. See script source for default.",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key for S3",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "pip==1.5.5",
+ "boto",
+ ],
+ "virtualenv_path": "venv",
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ "copy-info-files",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ ],
+ )
+
+ self.checksums = {}
+ self.bucket = None
+ self.bucket_name = self._get_bucket_name()
+ self.file_prefix = self._get_file_prefix()
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _pre_config_lock(self, rw_config):
+ super(ChecksumsGenerator, self)._pre_config_lock(rw_config)
+
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['version', 'build_number']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ # These defaults are set here rather than in the config because default
+ # lists cannot be completely overridden, only appended to.
+ if not self.config.get("formats"):
+ self.config["formats"] = ["sha512", "sha256"]
+
+ if not self.config.get("includes"):
+ self.config["includes"] = [
+ r"^.*\.tar\.bz2$",
+ r"^.*\.tar\.xz$",
+ r"^.*\.dmg$",
+ r"^.*\.bundle$",
+ r"^.*\.mar$",
+ r"^.*Setup.*\.exe$",
+ r"^.*\.xpi$",
+ r"^.*fennec.*\.apk$",
+ ]
+
+ def _get_bucket_name(self):
+ if self.config.get('bucket_name_full'):
+ return self.config['bucket_name_full']
+
+ suffix = "archive"
+ # Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
+ if self.config["stage_product"] == "firefox":
+ suffix = "firefox"
+
+ return "{}-{}".format(self.config["bucket_name_prefix"], suffix)
+
+ def _get_file_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config["stage_product"], self.config["version"], self.config["build_number"]
+ )
+
+ def _get_sums_filename(self, format_):
+ return "{}SUMS".format(format_.upper())
+
+ def _get_bucket(self):
+ if not self.bucket:
+ self.activate_virtualenv()
+ from boto.s3.connection import S3Connection
+
+ self.info("Connecting to S3")
+ conn = S3Connection()
+ self.debug("Successfully connected to S3")
+ self.info("Connecting to bucket {}".format(self.bucket_name))
+ self.bucket = conn.get_bucket(self.bucket_name)
+ return self.bucket
+
+ def collect_individual_checksums(self):
+ """This step grabs all of the small checksums files for the release,
+ filters out any unwanted files from within them, and adds the remainder
+ to self.checksums for subsequent steps to use."""
+ bucket = self._get_bucket()
+ self.info("File prefix is: {}".format(self.file_prefix))
+
+ # Temporary holding place for checksums
+ raw_checksums = []
+ def worker(item):
+ self.debug("Downloading {}".format(item))
+ # TODO: It would be nice to download the associated .asc file
+ # and verify against it.
+ sums = bucket.get_key(item).get_contents_as_string()
+ raw_checksums.append(sums)
+
+ def find_checksums_files():
+ self.info("Getting key names from bucket")
+ checksum_files = {"beets": [], "checksums": []}
+ for key in bucket.list(prefix=self.file_prefix):
+ if key.key.endswith(".checksums"):
+ self.debug("Found checksums file: {}".format(key.key))
+ checksum_files["checksums"].append(key.key)
+ elif key.key.endswith(".beet"):
+ self.debug("Found beet file: {}".format(key.key))
+ checksum_files["beets"].append(key.key)
+ else:
+ self.debug("Ignoring non-checksums file: {}".format(key.key))
+ if checksum_files["beets"]:
+ self.log("Using beet format")
+ return checksum_files["beets"]
+ else:
+ self.log("Using checksums format")
+ return checksum_files["checksums"]
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_checksums_files())
+
+ for c in raw_checksums:
+ for f, info in parse_checksums_file(c).iteritems():
+ for pattern in self.config["includes"]:
+ if re.search(pattern, f):
+ if f in self.checksums:
+ self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
+ if not set(self.config["formats"]) <= set(info["hashes"]):
+ self.fatal("Missing necessary format for file {}".format(f))
+ self.debug("Adding checksums for file: {}".format(f))
+ self.checksums[f] = info
+ break
+ else:
+ self.debug("Ignoring checksums for file: {}".format(f))
+
+ def create_big_checksums(self):
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Creating big checksums file: {}".format(sums))
+ with open(sums, "w+") as output_file:
+ for fn in sorted(self.checksums):
+ output_file.write("{} {}\n".format(self.checksums[fn]["hashes"][fmt], fn))
+
+ def sign(self):
+ dirs = self.query_abs_dirs()
+
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.vcs_checkout(
+ repo=self.config["tools_repo"],
+ branch="default",
+ vcs="hg",
+ dest=tools_dir,
+ )
+
+ sign_cmd = self.query_moz_sign_cmd(formats=["gpg"])
+
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Signing big checksums file: {}".format(sums))
+ retval = self.run_command(sign_cmd + [sums])
+ if retval != 0:
+ self.fatal("Failed to sign {}".format(sums))
+
+ def upload(self):
+ # we need to provide the public side of the gpg key so that people can
+ # verify the detached signatures
+ dirs = self.query_abs_dirs()
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.copyfile(os.path.join(tools_dir, 'scripts', 'release', 'KEY'),
+ 'KEY')
+ files = ['KEY']
+
+ for fmt in self.config["formats"]:
+ files.append(self._get_sums_filename(fmt))
+ files.append("{}.asc".format(self._get_sums_filename(fmt)))
+
+ bucket = self._get_bucket()
+ for f in files:
+ dest = posixpath.join(self.file_prefix, f)
+ self.info("Uploading {} to {}".format(f, dest))
+ key = bucket.new_key(dest)
+ key.set_contents_from_filename(f, headers={'Content-Type': 'text/plain'})
+
+ def copy_info_files(self):
+ bucket = self._get_bucket()
+
+ for key in bucket.list(prefix=self.file_prefix):
+ if re.search(r'/en-US/android.*_info\.txt$', key.name):
+ self.info("Found {}".format(key.name))
+ dest = posixpath.join(self.file_prefix, posixpath.basename(key.name))
+ self.info("Copying to {}".format(dest))
+ bucket.copy_key(new_key_name=dest,
+ src_bucket_name=self.bucket_name,
+ src_key_name=key.name,
+ metadata={'Content-Type': 'text/plain'})
+
+
+if __name__ == "__main__":
+ myScript = ChecksumsGenerator()
+ myScript.run_and_exit()
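
create_big_checksums writes one "<hash> <file>" line per entry into a SHA512SUMS (or SHA256SUMS) file. A small sketch of that output step, assuming a hand-built checksums dict in the same shape parse_checksums_file produces (hashes and file names are made up):

    checksums = {
        "linux-x86_64/en-US/firefox-39.0b5.tar.bz2": {
            "hashes": {"sha512": "3f786850e387..."}},
        "win32/en-US/Firefox Setup 39.0b5.exe": {
            "hashes": {"sha512": "89e6c98d9276..."}},
    }

    with open("SHA512SUMS", "w+") as output_file:
        for fn in sorted(checksums):
            output_file.write("{} {}\n".format(
                checksums[fn]["hashes"]["sha512"], fn))
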
diff --git a/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
new file mode 100644
index 000000000..78a60b4bc
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_bouncer_aliases.py
+
+A script to replace the old-fashioned way of updating the bouncer aliases through
+the tools script.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+# PostReleaseBouncerAliases {{{1
+class PostReleaseBouncerAliases(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseBouncerAliases, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+ "virtualenv_path": "venv",
+ 'credentials_file': 'oauth.txt',
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseBouncerAliases, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+
+ def _update_bouncer_alias(self, tuxedo_server_url, auth,
+ related_product, alias):
+ from redo import retry
+ import requests
+
+ url = "%s/create_update_alias" % tuxedo_server_url
+ data = {"alias": alias, "related_product": related_product}
+ self.log("Updating {} to point to {} using {}".format(alias,
+ related_product,
+ url))
+
+ # Wrap the real call to hide credentials from retry's logging
+ def do_update_bouncer_alias():
+ r = requests.post(url, data=data, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+
+ retry(do_update_bouncer_alias)
+
+ def update_bouncer_aliases(self):
+ tuxedo_server_url = self.config['tuxedo_server_url']
+ credentials_file = os.path.join(os.getcwd(),
+ self.config['credentials_file'])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ version = self.config['version']
+ for product, info in self.config["products"].iteritems():
+ if "alias" in info:
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+ self._update_bouncer_alias(tuxedo_server_url, auth,
+ related_product, info["alias"])
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseBouncerAliases().run_and_exit()
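
Each alias update is a single authenticated POST to the bouncer (Tuxedo) admin API. A sketch of the request made by _update_bouncer_alias, with placeholder URL, credentials, and product name:

    import requests

    # placeholders; the real values come from buildbot properties and oauth.txt
    tuxedo_server_url = "https://bounceradmin.example.com/api"
    auth = ("tuxedoUsername", "tuxedoPassword")
    data = {"alias": "firefox-latest", "related_product": "Firefox-39.0b5"}

    r = requests.post("%s/create_update_alias" % tuxedo_server_url,
                      data=data, auth=auth, verify=False, timeout=60)
    r.raise_for_status()
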
diff --git a/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
new file mode 100644
index 000000000..f84b5771c
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_mark_as_shipped.py
+
+A script to automate the manual step of marking a release as shipped in Ship-it
+following its successful ship-to-the-door operation.
+"""
+import os
+import sys
+from datetime import datetime
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def build_release_name(product, version, buildno):
+ """Function to reconstruct the name of the release based on product,
+ version and build number
+ """
+ return "{}-{}-build{}".format(product.capitalize(),
+ str(version), str(buildno))
+
+
+class MarkReleaseAsShipped(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(MarkReleaseAsShipped, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "shipitapi",
+ ],
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "timeout": 60,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(MarkReleaseAsShipped, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config['properties']
+ mandatory_props = ['product', 'version', 'build_number']
+ missing_props = []
+ for prop in mandatory_props:
+ if prop in props:
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ missing_props.append(prop)
+
+ if missing_props:
+ raise Exception("%s not found in configs" % missing_props)
+
+ self.config['name'] = build_release_name(self.config['product'],
+ self.config['version'],
+ self.config['build_number'])
+
+ def mark_as_shipped(self):
+ """Method to make a simple call to Ship-it API to change a release
+ status to 'shipped'
+ """
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ ship_it_credentials = credentials["ship_it_credentials"]
+ auth = (self.config["ship_it_username"],
+ ship_it_credentials.get(self.config["ship_it_username"]))
+ api_root = self.config['ship_it_root']
+
+ from shipitapi import Release
+ release_api = Release(auth, api_root=api_root,
+ timeout=self.config['timeout'])
+ shipped_at = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
+
+ self.info("Mark the release as shipped with %s timestamp" % shipped_at)
+ release_api.update(self.config['name'],
+ status='shipped', shippedAt=shipped_at)
+
+
+if __name__ == '__main__':
+ MarkReleaseAsShipped().run_and_exit()
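
The Ship-it release name is reconstructed purely from product, version and build number. A quick standalone check of the expected format:

    def build_release_name(product, version, buildno):
        return "{}-{}-build{}".format(product.capitalize(), str(version), str(buildno))

    assert build_release_name("firefox", "39.0b5", 2) == "Firefox-39.0b5-build2"
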
diff --git a/testing/mozharness/scripts/release/postrelease_version_bump.py b/testing/mozharness/scripts/release/postrelease_version_bump.py
new file mode 100644
index 000000000..dfffa699a
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_version_bump.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_version_bump.py
+
+A script to increase the in-tree version number after shipping a release.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+
+
+# PostReleaseVersionBump {{{1
+class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--next-version', ], {
+ "action": "store",
+ "dest": "next_version",
+ "type": "string",
+ "help": "Next version used in version bump",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ [['--product', ], {
+ "action": "store",
+ "dest": "product",
+ "type": "string",
+ "help": "Product name",
+ }],
+ [['--version', ], {
+ "action": "store",
+ "dest": "version",
+ "type": "string",
+ "help": "Version",
+ }],
+ [['--build-number', ], {
+ "action": "store",
+ "dest": "build_number",
+ "type": "string",
+ "help": "Build number",
+ }],
+ [['--revision', ], {
+ "action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "HG revision to tag",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseVersionBump, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ default_actions=[
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseVersionBump, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ else:
+ props = self.buildbot_config["properties"]
+ for prop in ['next_version', 'product', 'version', 'build_number',
+ 'revision']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ if not self.config.get("next_version"):
+ self.fatal("Next version has to be set. Use --next-version or "
+ "pass `next_version' via buildbot properties.")
+
+ def query_abs_dirs(self):
+ """ Allow for abs_from_dir and abs_to_dir
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PostReleaseVersionBump, self).query_abs_dirs()
+ self.abs_dirs["abs_gecko_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_gecko_dir"]]
+
+ def query_commit_message(self):
+ return "Automatic version bump. CLOSED TREE NO BUG a=release"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts, "-r", "."]
+
+ def pull(self):
+ super(PostReleaseVersionBump, self).pull(
+ repos=self.query_repos())
+
+ def bump_postrelease(self, *args, **kwargs):
+ """Bump version"""
+ dirs = self.query_abs_dirs()
+ for f in self.config["version_files"]:
+ curr_version = ".".join(
+ self.get_version(dirs['abs_gecko_dir'], f["file"]))
+ self.replace(os.path.join(dirs['abs_gecko_dir'], f["file"]),
+ curr_version, self.config["next_version"])
+
+ def tag(self):
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}",
+ "{product}_{version}_RELEASE"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
+ message = "No bug - Tagging {revision} with {tags} a=release CLOSED TREE"
+ message = message.format(
+ revision=self.config["revision"],
+ tags=', '.join(tags))
+ self.hg_tag(cwd=dirs["abs_gecko_dir"], tags=tags,
+ revision=self.config["revision"], message=message,
+ user=self.config["hg_user"], force=True)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseVersionBump().run_and_exit()
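
The tag action derives two hg tags from product, version and build number. A standalone sketch of the same string formatting, with made-up release values:

    product, version, build_number = "firefox", "45.0", "3"

    tags = ["{product}_{version}_BUILD{build_number}",
            "{product}_{version}_RELEASE"]
    tags = [t.format(product=product.upper(),
                     version=version.replace(".", "_"),
                     build_number=build_number) for t in tags]

    print(tags)  # ['FIREFOX_45_0_BUILD3', 'FIREFOX_45_0_RELEASE']
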
diff --git a/testing/mozharness/scripts/release/publish_balrog.py b/testing/mozharness/scripts/release/publish_balrog.py
new file mode 100644
index 000000000..edb381311
--- /dev/null
+++ b/testing/mozharness/scripts/release/publish_balrog.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script publish a release to Balrog.
+
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+# PublishBalrog {{{1
+
+
+class PublishBalrog(MercurialScript, BuildbotMixin):
+
+ def __init__(self, require_config_file=True):
+ super(PublishBalrog, self).__init__(
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PublishBalrog, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'channels',
+ 'balrog_api_root', 'schedule_at', 'background_rate']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PublishBalrog, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def pull(self):
+ super(PublishBalrog, self).pull(
+ repos=self.query_repos())
+
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-shipper.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--credentials-file", auth,
+ "--username", self.config["balrog_username"],
+ "--version", self.config["version"],
+ "--product", self.config["product"],
+ "--build-number", str(self.config["build_number"]),
+ "--verbose",
+ ])
+ for r in channel_config["publish_rules"]:
+ cmd.extend(["--rules", r])
+ if self.config.get("schedule_at"):
+ cmd.extend(["--schedule-at", self.config["schedule_at"]])
+ if self.config.get("background_rate"):
+ cmd.extend(["--background-rate", str(self.config["background_rate"])])
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PublishBalrog().run_and_exit()
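
_submit_to_balrog shells out to balrog-release-shipper.py from the tools repo. A rough sketch of the command it assembles, with placeholder API root, username, release values, and publish rule:

    cmd = ["python", "scripts/build-promotion/balrog-release-shipper.py",
           "--api-root", "https://balrog-admin.example.com/api",
           "--credentials-file", "oauth.txt",
           "--username", "balrog-ffxbld",
           "--version", "45.0",
           "--product", "firefox",
           "--build-number", "3",
           "--verbose"]
    for rule in ["firefox-release"]:  # stands in for channel_config["publish_rules"]
        cmd.extend(["--rules", rule])
    print(" ".join(cmd))
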
diff --git a/testing/mozharness/scripts/release/push-candidate-to-releases.py b/testing/mozharness/scripts/release/push-candidate-to-releases.py
new file mode 100644
index 000000000..5339fa38a
--- /dev/null
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -0,0 +1,200 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+
+
+class ReleasePusher(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "Bucket to copy files from candidates/ to releases/",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "default": [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*[^k]\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*bouncer.apk$",
+ r"^.*contrib.*",
+ r"^.*/beetmover-checksums/.*$",
+ ],
+ "action": "append",
+ "help": "List of patterns to exclude from copy. The list can be "
+ "extended by passing multiple --exclude arguments.",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": "int",
+ "help": "Number of copy requests to run concurrently",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self, aws_creds):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ )
+
+ # validate aws credentials
+ if not (all(aws_creds) or self.config.get('credentials')):
+ self.fatal("aws creds not defined. please add them to your config or env.")
+ if any(aws_creds) and self.config.get('credentials'):
+ self.fatal("aws creds found in env and self.config. please declare in one place only.")
+
+ # set aws credentials
+ if all(aws_creds):
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ else: # use the credentials file via BOTO_CONFIG
+ self.aws_key_id, self.aws_secret_key = None, None
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _get_candidates_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
+
+ def _get_releases_prefix(self):
+ return "pub/{}/releases/{}/".format(
+ self.config["product"],
+ self.config["version"]
+ )
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.config["excludes"]:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def push_to_releases(self):
+ """This step grabs the list of files in the candidates dir,
+ filters out any unwanted files from within them, and copies
+ the remainder."""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(aws_access_key_id=self.aws_key_id,
+ aws_secret_access_key=self.aws_secret_key)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ # ensure the destination is empty
+ self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
+ keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
+ if keys:
+ self.warning("Destination already exists with %s keys" % len(keys))
+
+ def worker(item):
+ source, destination = item
+
+ def copy_key():
+ source_key = bucket.get_key(source)
+ dest_key = bucket.get_key(destination)
+ # According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
+ # S3 key MD5 is represented as ETag, except when objects are
+ # uploaded using the multipart method. In that case the object's ETag
+ # is constructed from its MD5, a minus sign, and the number of
+ # parts. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
+ source_md5 = source_key.etag.split("-")[0]
+ if dest_key:
+ dest_md5 = dest_key.etag.split("-")[0]
+ else:
+ dest_md5 = None
+
+ if not dest_key:
+ self.info("Copying {} to {}".format(source, destination))
+ bucket.copy_key(destination, self.config["bucket_name"],
+ source)
+ elif source_md5 == dest_md5:
+ self.warning(
+ "{} already exists with the same content ({}), skipping copy".format(
+ destination, dest_md5))
+ else:
+ self.fatal(
+ "{} already exists with the different content (src ETag: {}, dest ETag: {}), aborting".format(
+ destination, source_key.etag, dest_key.etag))
+
+ return retry(copy_key, sleeptime=5, max_sleeptime=60,
+ retry_exceptions=(S3CopyError, S3ResponseError))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ release_prefix = self._get_releases_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = keyname.replace(candidates_prefix,
+ release_prefix)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_release_files())
+
+if __name__ == "__main__":
+ myScript = ReleasePusher(pop_aws_auth_from_env())
+ myScript.run_and_exit()
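
The copy step only rewrites the key prefix: everything under the candidates/buildN prefix is copied to the matching releases path. A one-key sketch of that renaming, with made-up values:

    candidates_prefix = "pub/firefox/candidates/39.0b5-candidates/build2/"
    releases_prefix = "pub/firefox/releases/39.0b5/"

    keyname = candidates_prefix + "linux-x86_64/en-US/firefox-39.0b5.tar.bz2"
    destination = keyname.replace(candidates_prefix, releases_prefix)
    print(destination)
    # pub/firefox/releases/39.0b5/linux-x86_64/en-US/firefox-39.0b5.tar.bz2
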
diff --git a/testing/mozharness/scripts/release/updates.py b/testing/mozharness/scripts/release/updates.py
new file mode 100644
index 000000000..4b660a67b
--- /dev/null
+++ b/testing/mozharness/scripts/release/updates.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script to bump patcher configs, generate update verification configs, and
+publish top-level release blob information to Balrog.
+
+It clones the tools repo, modifies the existing patcher config to include
+current release build information, generates update verification configs,
+commits the changes, and tags the repo using the Releng tag naming convention.
+After the changes are pushed to the repo, the script submits top-level release
+information to Balrog.
+"""
+
+import os
+import re
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+from mozharness.mozilla.release import get_previous_version
+
+
+# UpdatesBumper {{{1
+class UpdatesBumper(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(UpdatesBumper, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UpdatesBumper, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'revision',
+ 'appVersion', 'balrog_api_root', "channels"]:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build") for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+ self.config["channels"] = [c.strip() for c in
+ props["channels"].split(",")]
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(UpdatesBumper, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_tools_dir"]]
+
+ def query_commit_message(self):
+ return "Automated configuration bump"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts]
+
+ def query_shipped_locales_path(self):
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs["abs_work_dir"], "shipped-locales")
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def pull(self):
+ super(UpdatesBumper, self).pull(
+ repos=self.query_repos())
+
+ def download_shipped_locales(self):
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs["abs_work_dir"])
+ url = self.config["shipped-locales-url"].format(
+ revision=self.config["revision"])
+ if not self.download_file(url=url,
+ file_name=self.query_shipped_locales_path()):
+ self.fatal("Unable to fetch shipped-locales from %s" % url)
+
+ def bump_configs(self):
+ for channel, channel_config in self.query_channel_configs():
+ self.bump_patcher_config(channel_config)
+ self.bump_update_verify_configs(channel, channel_config)
+
+ def query_matching_partials(self, channel_config):
+ return [(v, b) for v, b in self.config["partial_versions"] if
+ re.match(channel_config["version_regex"], v)]
+
+ def query_patcher_config(self, channel_config):
+ dirs = self.query_abs_dirs()
+ patcher_config = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-configs",
+ channel_config["patcher_config"])
+ return patcher_config
+
+ def query_update_verify_config(self, channel, platform):
+ dirs = self.query_abs_dirs()
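+        # e.g. ".../release/updates/beta-firefox-linux64.cfg" (channel and
+        # platform values shown are illustrative)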
+ uvc = os.path.join(
+ dirs["abs_tools_dir"], "release/updates",
+ "{}-{}-{}.cfg".format(channel, self.config["product"], platform))
+ return uvc
+
+ def bump_patcher_config(self, channel_config):
+ # TODO: to make it possible to run this before we have files copied to
+ # the candidates directory, we need to add support to fetch build IDs
+ # from tasks.
+ dirs = self.query_abs_dirs()
+ env = {"PERL5LIB": os.path.join(dirs["abs_tools_dir"], "lib/perl")}
+ partial_versions = [v[0] for v in
+ self.query_matching_partials(channel_config)]
+ script = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-config-bump.pl")
+ patcher_config = self.query_patcher_config(channel_config)
+ cmd = [self.query_exe("perl"), script]
+ cmd.extend([
+ "-p", self.config["product"],
+ "-r", self.config["product"].capitalize(),
+ "-v", self.config["version"],
+ "-a", self.config["appVersion"],
+ "-o", get_previous_version(
+ self.config["version"], partial_versions),
+ "-b", str(self.config["build_number"]),
+ "-c", patcher_config,
+ "-f", self.config["archive_domain"],
+ "-d", self.config["download_domain"],
+ "-l", self.query_shipped_locales_path(),
+ ])
+ for v in partial_versions:
+ cmd.extend(["--partial-version", v])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
+ for mar_channel_id in channel_config["mar_channel_ids"]:
+ cmd.extend(["--mar-channel-id", mar_channel_id])
+ self.run_command(cmd, halt_on_failure=True, env=env)
+
+ def bump_update_verify_configs(self, channel, channel_config):
+ dirs = self.query_abs_dirs()
+ script = os.path.join(
+ dirs["abs_tools_dir"],
+ "scripts/build-promotion/create-update-verify-config.py")
+ patcher_config = self.query_patcher_config(channel_config)
+ for platform in self.config["platforms"]:
+ cmd = [self.query_exe("python"), script]
+ output = self.query_update_verify_config(channel, platform)
+ cmd.extend([
+ "--config", patcher_config,
+ "--platform", platform,
+ "--update-verify-channel",
+ channel_config["update_verify_channel"],
+ "--output", output,
+ "--archive-prefix", self.config["archive_prefix"],
+ "--previous-archive-prefix",
+ self.config["previous_archive_prefix"],
+ "--product", self.config["product"],
+ "--balrog-url", self.config["balrog_url"],
+ "--build-number", str(self.config["build_number"]),
+ ])
+
+ self.run_command(cmd, halt_on_failure=True)
+
+ def tag(self):
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}_RUNTIME",
+ "{product}_{version}_RELEASE_RUNTIME"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
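+        # e.g. for product "firefox", version "45.0" and build 2 this yields
+        # "FIREFOX_45_0_BUILD2_RUNTIME" and "FIREFOX_45_0_RELEASE_RUNTIME"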
+ self.hg_tag(cwd=dirs["abs_tools_dir"], tags=tags,
+ user=self.config["hg_user"], force=True)
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-pusher.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--download-domain", self.config["download_domain"],
+ "--archive-domain", self.config["archive_domain"],
+ "--credentials-file", auth,
+ "--product", self.config["product"],
+ "--version", self.config["version"],
+ "--build-number", str(self.config["build_number"]),
+ "--app-version", self.config["appVersion"],
+ "--username", self.config["balrog_username"],
+ "--verbose",
+ ])
+ for c in channel_config["channel_names"]:
+ cmd.extend(["--channel", c])
+ for r in channel_config["rules_to_update"]:
+ cmd.extend(["--rule-to-update", r])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
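+        # partials are passed as "<version>build<build_number>", e.g. "44.0build1"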
+ for v, build_number in self.query_matching_partials(channel_config):
+ partial = "{version}build{build_number}".format(
+ version=v, build_number=build_number)
+ cmd.extend(["--partial-update", partial])
+ if channel_config["requires_mirrors"]:
+ cmd.append("--requires-mirrors")
+ if self.config["balrog_use_dummy_suffix"]:
+ cmd.append("--dummy")
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ UpdatesBumper().run_and_exit()
diff --git a/testing/mozharness/scripts/release/uptake_monitoring.py b/testing/mozharness/scripts/release/uptake_monitoring.py
new file mode 100644
index 000000000..9ec24621f
--- /dev/null
+++ b/testing/mozharness/scripts/release/uptake_monitoring.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" uptake_monitoring.py
+
+A script to replace the old-fashioned way of computing uptake monitoring
+from the scheduler within the slaves.
+"""
+
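+# The script polls the tuxedo/bouncer uptake API until the lowest uptake value
+# across all monitored products and platforms reaches "min_uptake", giving up
+# after "poll_timeout" seconds (polling every "poll_interval" seconds).
+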
+import os
+import sys
+import datetime
+import time
+import xml.dom.minidom
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def get_tuxedo_uptake_url(tuxedo_server_url, related_product, os):
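+    # e.g. "<tuxedo_server_url>/uptake/?product=Firefox-45.0&os=linux64"
+    # (product and os values shown are illustrative)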
+ return '%s/uptake/?product=%s&os=%s' % (tuxedo_server_url,
+ related_product, os)
+
+
+class UptakeMonitoring(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(UptakeMonitoring, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "poll_interval": 60,
+ "poll_timeout": 20*60,
+ "min_uptake": 10000,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UptakeMonitoring, self)._pre_config_lock(rw_config)
+        # Override config values with the buildbot properties (which are
+        # populated from taskcluster properties).
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build")[0] for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+
+ def _get_product_uptake(self, tuxedo_server_url, auth,
+ related_product, os):
+ from redo import retry
+ import requests
+
+ url = get_tuxedo_uptake_url(tuxedo_server_url, related_product, os)
+ self.info("Requesting {} from tuxedo".format(url))
+
+ def get_tuxedo_page():
+ r = requests.get(url, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+ return r.content
+
+ def calculateUptake(page):
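+            # The response is XML; every <available>N</available> text node is
+            # collected and the smallest count wins (0 if none are present).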
+ doc = xml.dom.minidom.parseString(page)
+ uptake_values = []
+
+ for element in doc.getElementsByTagName('available'):
+ for node in element.childNodes:
+ if node.nodeType == xml.dom.minidom.Node.TEXT_NODE and \
+ node.data.isdigit():
+ uptake_values.append(int(node.data))
+ if not uptake_values:
+ uptake_values = [0]
+ return min(uptake_values)
+
+ page = retry(get_tuxedo_page)
+ uptake = calculateUptake(page)
+ self.info("Current uptake for {} is {}".format(related_product, uptake))
+ return uptake
+
+ def _get_release_uptake(self, auth):
+ assert isinstance(self.config["platforms"], (list, tuple))
+
+ # handle the products first
+ tuxedo_server_url = self.config["tuxedo_server_url"]
+ version = self.config["version"]
+ dl = []
+
+ for product, info in self.config["products"].iteritems():
+ if info.get("check_uptake"):
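+                # "product-name" is a template such as "Firefox-%(version)s"
+                # (illustrative), expanded with the release version below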
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ # handle the partials as well
+ prev_versions = self.config["partial_versions"]
+ for product, info in self.config["partials"].iteritems():
+ if info.get("check_uptake"):
+ product_template = info["product-name"]
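+                # partial product names substitute both the new and the
+                # previous version, e.g.
+                # "Firefox-%(version)s-Partial-%(prev_version)s" (illustrative)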
+ for prev_version in prev_versions:
+ subs = {
+ "version": version,
+ "prev_version": prev_version
+ }
+ related_product = product_template % subs
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ return min(dl)
+
+ def monitor_uptake(self):
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
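+        # The credentials file is exec'd as plain Python and is expected to
+        # define tuxedoUsername and tuxedoPassword, e.g. (illustrative):
+        #   tuxedoUsername = "some-user"
+        #   tuxedoPassword = "some-password"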
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ self.info("Starting the loop to determine the uptake monitoring ...")
+
+ start_time = datetime.datetime.now()
+ while True:
+ delta = (datetime.datetime.now() - start_time).seconds
+ if delta > self.config["poll_timeout"]:
+                self.error("Uptake monitoring sadly timed out")
+ raise Exception("Time-out during uptake monitoring")
+
+ uptake = self._get_release_uptake(auth)
+ self.info("Current uptake value to check is {}".format(uptake))
+
+ if uptake >= self.config["min_uptake"]:
+ self.info("Uptake monitoring is complete!")
+ break
+ else:
+ self.info("Mirrors not yet updated, sleeping for a bit ...")
+ time.sleep(self.config["poll_interval"])
+
+
+if __name__ == '__main__':
+ myScript = UptakeMonitoring()
+ myScript.run_and_exit()