Diffstat (limited to 'testing/mozharness/scripts')
-rw-r--r--  testing/mozharness/scripts/android_emulator_unittest.py | 755
-rwxr-xr-x  testing/mozharness/scripts/bouncer_submitter.py | 192
-rwxr-xr-x  testing/mozharness/scripts/configtest.py | 142
-rwxr-xr-x  testing/mozharness/scripts/desktop_l10n.py | 1152
-rwxr-xr-x  testing/mozharness/scripts/desktop_partner_repacks.py | 198
-rwxr-xr-x  testing/mozharness/scripts/desktop_unittest.py | 742
-rw-r--r--  testing/mozharness/scripts/firefox_media_tests_buildbot.py | 122
-rwxr-xr-x  testing/mozharness/scripts/firefox_media_tests_jenkins.py | 48
-rw-r--r--  testing/mozharness/scripts/firefox_media_tests_taskcluster.py | 110
-rwxr-xr-x  testing/mozharness/scripts/firefox_ui_tests/functional.py | 20
-rwxr-xr-x  testing/mozharness/scripts/firefox_ui_tests/update.py | 20
-rwxr-xr-x  testing/mozharness/scripts/firefox_ui_tests/update_release.py | 323
-rwxr-xr-x  testing/mozharness/scripts/fx_desktop_build.py | 235
-rwxr-xr-x  testing/mozharness/scripts/gaia_build_integration.py | 56
-rwxr-xr-x  testing/mozharness/scripts/gaia_build_unit.py | 56
-rw-r--r--  testing/mozharness/scripts/gaia_integration.py | 75
-rwxr-xr-x  testing/mozharness/scripts/gaia_linter.py | 148
-rwxr-xr-x  testing/mozharness/scripts/gaia_unit.py | 109
-rwxr-xr-x  testing/mozharness/scripts/marionette.py | 358
-rw-r--r--  testing/mozharness/scripts/marionette_harness_tests.py | 141
-rwxr-xr-x  testing/mozharness/scripts/merge_day/gecko_migration.py | 545
-rwxr-xr-x  testing/mozharness/scripts/mobile_l10n.py | 714
-rwxr-xr-x  testing/mozharness/scripts/mobile_partner_repack.py | 327
-rwxr-xr-x  testing/mozharness/scripts/multil10n.py | 21
-rw-r--r--  testing/mozharness/scripts/openh264_build.py | 250
-rw-r--r--  testing/mozharness/scripts/release/antivirus.py | 193
-rwxr-xr-x  testing/mozharness/scripts/release/beet_mover.py | 372
-rw-r--r--  testing/mozharness/scripts/release/generate-checksums.py | 284
-rw-r--r--  testing/mozharness/scripts/release/postrelease_bouncer_aliases.py | 107
-rw-r--r--  testing/mozharness/scripts/release/postrelease_mark_as_shipped.py | 110
-rw-r--r--  testing/mozharness/scripts/release/postrelease_version_bump.py | 184
-rw-r--r--  testing/mozharness/scripts/release/publish_balrog.py | 119
-rw-r--r--  testing/mozharness/scripts/release/push-candidate-to-releases.py | 200
-rw-r--r--  testing/mozharness/scripts/release/updates.py | 299
-rw-r--r--  testing/mozharness/scripts/release/uptake_monitoring.py | 188
-rwxr-xr-x  testing/mozharness/scripts/spidermonkey/build.b2g | 8
-rwxr-xr-x  testing/mozharness/scripts/spidermonkey/build.browser | 10
-rwxr-xr-x  testing/mozharness/scripts/spidermonkey/build.shell | 6
-rwxr-xr-x  testing/mozharness/scripts/spidermonkey_build.py | 482
-rwxr-xr-x  testing/mozharness/scripts/talos_script.py | 21
-rwxr-xr-x  testing/mozharness/scripts/web_platform_tests.py | 258
41 files changed, 9700 insertions, 0 deletions
diff --git a/testing/mozharness/scripts/android_emulator_unittest.py b/testing/mozharness/scripts/android_emulator_unittest.py
new file mode 100644
index 000000000..2d17b9cb6
--- /dev/null
+++ b/testing/mozharness/scripts/android_emulator_unittest.py
@@ -0,0 +1,755 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import copy
+import datetime
+import glob
+import os
+import re
+import sys
+import signal
+import socket
+import subprocess
+import telnetlib
+import time
+import tempfile
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozprocess import ProcessHandler
+
+from mozharness.base.log import FATAL
+from mozharness.base.script import BaseScript, PreScriptAction, PostScriptAction
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.mozbase import MozbaseMixin
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+from mozharness.mozilla.testing.unittest import EmulatorMixin
+
+
+class AndroidEmulatorTest(BlobUploadMixin, TestingMixin, EmulatorMixin, VCSMixin, BaseScript, MozbaseMixin):
+ config_options = [[
+ ["--test-suite"],
+ {"action": "store",
+ "dest": "test_suite",
+ }
+ ], [
+ ["--adb-path"],
+ {"action": "store",
+ "dest": "adb_path",
+ "default": None,
+ "help": "Path to adb",
+ }
+ ], [
+ ["--total-chunk"],
+ {"action": "store",
+ "dest": "total_chunks",
+ "default": None,
+ "help": "Number of total chunks",
+ }
+ ], [
+ ["--this-chunk"],
+ {"action": "store",
+ "dest": "this_chunk",
+ "default": None,
+ "help": "Number of this chunk",
+ }
+ ]] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options)
+
+ error_list = [
+ ]
+
+ virtualenv_requirements = [
+ ]
+
+ virtualenv_modules = [
+ ]
+
+ app_name = None
+
+ def __init__(self, require_config_file=False):
+ super(AndroidEmulatorTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'setup-avds',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ default_actions=['clobber',
+ 'start-emulator',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'verify-emulator',
+ 'install',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={
+ 'virtualenv_modules': self.virtualenv_modules,
+ 'virtualenv_requirements': self.virtualenv_requirements,
+ 'require_test_zip': True,
+ # IP address of the host as seen from the emulator
+ 'remote_webserver': '10.0.2.2',
+ }
+ )
+
+ # these are necessary since self.config is read only
+ c = self.config
+ abs_dirs = self.query_abs_dirs()
+ self.adb_path = self.query_exe('adb')
+ self.installer_url = c.get('installer_url')
+ self.installer_path = c.get('installer_path')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.test_manifest = c.get('test_manifest')
+ self.robocop_path = os.path.join(abs_dirs['abs_work_dir'], "robocop.apk")
+ self.minidump_stackwalk_path = c.get("minidump_stackwalk_path")
+ self.emulator = c.get('emulator')
+ self.test_suite = c.get('test_suite')
+ self.this_chunk = c.get('this_chunk')
+ self.total_chunks = c.get('total_chunks')
+ if self.test_suite not in self.config["suite_definitions"]:
+ # accept old-style test suite name like "mochitest-3"
+ m = re.match("(.*)-(\d*)", self.test_suite)
+ if m:
+ self.test_suite = m.group(1)
+ if self.this_chunk is None:
+ self.this_chunk = m.group(2)
+ self.sdk_level = None
+ self.xre_path = None
+
+ def _query_tests_dir(self):
+ dirs = self.query_abs_dirs()
+ try:
+ test_dir = self.config["suite_definitions"][self.test_suite]["testsdir"]
+ except:
+ test_dir = self.test_suite
+ return os.path.join(dirs['abs_test_install_dir'], test_dir)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(AndroidEmulatorTest, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_test_install_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_xre_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'hostutils')
+ dirs['abs_modules_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'modules')
+ dirs['abs_blob_upload_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+ dirs['abs_emulator_dir'] = abs_dirs['abs_work_dir']
+ dirs['abs_mochitest_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'mochitest')
+ dirs['abs_marionette_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'harness', 'marionette_harness')
+ dirs['abs_marionette_tests_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'tests', 'testing',
+ 'marionette', 'harness', 'marionette_harness', 'tests')
+ dirs['abs_avds_dir'] = self.config.get("avds_dir", "/home/cltbld/.android")
+
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ requirements = None
+ if os.path.isdir(dirs['abs_mochitest_dir']):
+ # mochitest is the only thing that needs this
+ requirements = os.path.join(dirs['abs_mochitest_dir'],
+ 'websocketprocessbridge',
+ 'websocketprocessbridge_requirements.txt')
+ elif self.test_suite == 'marionette':
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config', 'marionette_requirements.txt')
+ if requirements:
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+
+ def _launch_emulator(self):
+ env = self.query_env()
+
+ # Set $LD_LIBRARY_PATH to self.dirs['abs_work_dir'] so that
+ # the emulator picks up the symlink to libGL.so.1 that we
+ # constructed in start_emulator.
+ env['LD_LIBRARY_PATH'] = self.abs_dirs['abs_work_dir']
+
+ # Set environment variables to help emulator find the AVD.
+ # In newer versions of the emulator, ANDROID_AVD_HOME should
+ # point to the 'avd' directory.
+ # For older versions of the emulator, ANDROID_SDK_HOME should
+ # point to the directory containing the '.android' directory
+ # containing the 'avd' directory.
+ avd_home_dir = self.abs_dirs['abs_avds_dir']
+ env['ANDROID_AVD_HOME'] = os.path.join(avd_home_dir, 'avd')
+ env['ANDROID_SDK_HOME'] = os.path.abspath(os.path.join(avd_home_dir, '..'))
+
+ command = [
+ "emulator", "-avd", self.emulator["name"],
+ "-port", str(self.emulator["emulator_port"]),
+ ]
+ if "emulator_extra_args" in self.config:
+ command += self.config["emulator_extra_args"].split()
+
+ tmp_file = tempfile.NamedTemporaryFile(mode='w')
+ tmp_stdout = open(tmp_file.name, 'w')
+ self.info("Created temp file %s." % tmp_file.name)
+ self.info("Trying to start the emulator with this command: %s" % ' '.join(command))
+ proc = subprocess.Popen(command, stdout=tmp_stdout, stderr=tmp_stdout, env=env)
+ return {
+ "process": proc,
+ "tmp_file": tmp_file,
+ }
+
+ def _retry(self, max_attempts, interval, func, description, max_time = 0):
+ '''
+ Execute func until it returns True, up to max_attempts times, waiting for
+ interval seconds between each attempt. description is logged on each attempt.
+ If max_time is specified, no further attempts will be made once max_time
+ seconds have elapsed; this provides some protection for the case where
+ the run-time for func is long or highly variable.
+ '''
+ status = False
+ attempts = 0
+ if max_time > 0:
+ end_time = datetime.datetime.now() + datetime.timedelta(seconds = max_time)
+ else:
+ end_time = None
+ while attempts < max_attempts and not status:
+ if (end_time is not None) and (datetime.datetime.now() > end_time):
+ self.info("Maximum retry run-time of %d seconds exceeded; remaining attempts abandoned" % max_time)
+ break
+ if attempts != 0:
+ self.info("Sleeping %d seconds" % interval)
+ time.sleep(interval)
+ attempts += 1
+ self.info(">> %s: Attempt #%d of %d" % (description, attempts, max_attempts))
+ status = func()
+ return status
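A minimal sketch of how these parameters combine, taken from the _retry call sites in _verify_emulator further down in this file: the boot check retries up to 30 times at 10-second intervals but gives up once 330 seconds have elapsed, while the adb and telnet checks use 4 attempts spaced 30 seconds apart.

    # Illustrative excerpt of the call pattern used later in this script.
    adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
    boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time=330)
    telnet_ok = self._retry(4, 30, self._telnet_to_emulator, "Verify telnet to emulator")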
+
+ def _run_with_timeout(self, timeout, cmd):
+ timeout_cmd = ['timeout', '%s' % timeout] + cmd
+ return self._run_proc(timeout_cmd)
+
+ def _run_proc(self, cmd):
+ self.info('Running %s' % subprocess.list2cmdline(cmd))
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ if out:
+ self.info('%s' % str(out.strip()))
+ if err:
+ self.info('stderr: %s' % str(err.strip()))
+ return out
+
+ def _telnet_cmd(self, telnet, command):
+ telnet.write('%s\n' % command)
+ result = telnet.read_until('OK', 10)
+ self.info('%s: %s' % (command, result))
+ return result
+
+ def _verify_adb(self):
+ self.info('Verifying adb connectivity')
+ self._run_with_timeout(180, [self.adb_path, 'wait-for-device'])
+ return True
+
+ def _verify_adb_device(self):
+ out = self._run_with_timeout(30, [self.adb_path, 'devices'])
+ if (self.emulator['device_id'] in out) and ("device" in out):
+ return True
+ return False
+
+ def _is_boot_completed(self):
+ boot_cmd = [self.adb_path, '-s', self.emulator['device_id'],
+ 'shell', 'getprop', 'sys.boot_completed']
+ out = self._run_with_timeout(30, boot_cmd)
+ if out.strip() == '1':
+ return True
+ return False
+
+ def _telnet_to_emulator(self):
+ port = self.emulator["emulator_port"]
+ telnet_ok = False
+ try:
+ tn = telnetlib.Telnet('localhost', port, 10)
+ if tn is not None:
+ self.info('Connected to port %d' % port)
+ res = tn.read_until('OK', 10)
+ self.info(res)
+ self._telnet_cmd(tn, 'avd status')
+ self._telnet_cmd(tn, 'redir list')
+ self._telnet_cmd(tn, 'network status')
+ tn.write('quit\n')
+ tn.read_all()
+ telnet_ok = True
+ else:
+ self.warning('Unable to connect to port %d' % port)
+ except socket.error, e:
+ self.info('Trying again after socket error: %s' % str(e))
+ pass
+ except EOFError:
+ self.info('Trying again after EOF')
+ pass
+ except:
+ self.info('Trying again after unexpected exception')
+ pass
+ finally:
+ if tn is not None:
+ tn.close()
+ return telnet_ok
+
+ def _verify_emulator(self):
+ adb_ok = self._verify_adb()
+ if not adb_ok:
+ self.warning('Unable to communicate with adb')
+ return False
+ adb_device_ok = self._retry(4, 30, self._verify_adb_device, "Verify emulator visible to adb")
+ if not adb_device_ok:
+ self.warning('Unable to communicate with emulator via adb')
+ return False
+ boot_ok = self._retry(30, 10, self._is_boot_completed, "Verify Android boot completed", max_time = 330)
+ if not boot_ok:
+ self.warning('Unable to verify Android boot completion')
+ return False
+ telnet_ok = self._retry(4, 30, self._telnet_to_emulator, "Verify telnet to emulator")
+ if not telnet_ok:
+ self.warning('Unable to telnet to emulator on port %d' % self.emulator["emulator_port"])
+ return False
+ return True
+
+ def _verify_emulator_and_restart_on_fail(self):
+ emulator_ok = self._verify_emulator()
+ if not emulator_ok:
+ self._dump_host_state()
+ self._screenshot("emulator-startup-screenshot-")
+ self._kill_processes(self.config["emulator_process_name"])
+ self._run_proc(['ps', '-ef'])
+ self._dump_emulator_log()
+ # remove emulator tmp files
+ for dir in glob.glob("/tmp/android-*"):
+ self.rmtree(dir)
+ self._restart_adbd()
+ time.sleep(5)
+ self.emulator_proc = self._launch_emulator()
+ return emulator_ok
+
+ def _install_fennec_apk(self):
+ install_ok = False
+ if int(self.sdk_level) >= 23:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.installer_path]
+ else:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.installer_path]
+ out = self._run_with_timeout(300, cmd)
+ if 'Success' in out:
+ install_ok = True
+ return install_ok
+
+ def _install_robocop_apk(self):
+ install_ok = False
+ if int(self.sdk_level) >= 23:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', '-g', self.robocop_path]
+ else:
+ cmd = [self.adb_path, '-s', self.emulator['device_id'], 'install', '-r', self.robocop_path]
+ out = self._run_with_timeout(300, cmd)
+ if 'Success' in out:
+ install_ok = True
+ return install_ok
+
+ def _dump_host_state(self):
+ self._run_proc(['ps', '-ef'])
+ self._run_proc(['netstat', '-a', '-p', '-n', '-t', '-u'])
+
+ def _dump_emulator_log(self):
+ self.info("##### %s emulator log begins" % self.emulator["name"])
+ output = self.read_from_file(self.emulator_proc["tmp_file"].name, verbose=False)
+ if output:
+ self.info(output)
+ self.info("##### %s emulator log ends" % self.emulator["name"])
+
+ def _kill_processes(self, process_name):
+ p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ self.info("Let's kill every process called %s" % process_name)
+ for line in out.splitlines():
+ if process_name in line:
+ pid = int(line.split(None, 1)[0])
+ self.info("Killing pid %d." % pid)
+ os.kill(pid, signal.SIGKILL)
+
+ def _restart_adbd(self):
+ self._run_with_timeout(30, [self.adb_path, 'kill-server'])
+ self._run_with_timeout(30, [self.adb_path, 'start-server'])
+
+ def _screenshot(self, prefix):
+ """
+ Save a screenshot of the entire screen to the blob upload directory.
+ """
+ dirs = self.query_abs_dirs()
+ utility = os.path.join(self.xre_path, "screentopng")
+ if not os.path.exists(utility):
+ self.warning("Unable to take screenshot: %s does not exist" % utility)
+ return
+ try:
+ tmpfd, filename = tempfile.mkstemp(prefix=prefix, suffix='.png',
+ dir=dirs['abs_blob_upload_dir'])
+ os.close(tmpfd)
+ self.info("Taking screenshot with %s; saving to %s" % (utility, filename))
+ subprocess.call([utility, filename], env=self.query_env())
+ except OSError, err:
+ self.warning("Failed to take screenshot: %s" % err.strerror)
+
+ def _query_package_name(self):
+ if self.app_name is None:
+            # find app name from package-name.txt; assumes download-and-extract has completed successfully
+ apk_dir = self.abs_dirs['abs_work_dir']
+ self.apk_path = os.path.join(apk_dir, self.installer_path)
+ unzip = self.query_exe("unzip")
+ package_path = os.path.join(apk_dir, 'package-name.txt')
+ unzip_cmd = [unzip, '-q', '-o', self.apk_path]
+ self.run_command(unzip_cmd, cwd=apk_dir, halt_on_failure=True)
+ self.app_name = str(self.read_from_file(package_path, verbose=True)).rstrip()
+ return self.app_name
+
+ def preflight_install(self):
+ # in the base class, this checks for mozinstall, but we don't use it
+ pass
+
+ def _build_command(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ if self.test_suite not in self.config["suite_definitions"]:
+ self.fatal("Key '%s' not defined in the config!" % self.test_suite)
+
+ cmd = [
+ self.query_python_path('python'),
+ '-u',
+ os.path.join(
+ self._query_tests_dir(),
+ self.config["suite_definitions"][self.test_suite]["run_filename"]
+ ),
+ ]
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_raw.log' % self.test_suite)
+
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_errorsummary.log' % self.test_suite)
+ str_format_values = {
+ 'app': self._query_package_name(),
+ 'remote_webserver': c['remote_webserver'],
+ 'xre_path': self.xre_path,
+ 'utility_path': self.xre_path,
+ 'http_port': self.emulator['http_port'],
+ 'ssl_port': self.emulator['ssl_port'],
+ 'certs_path': os.path.join(dirs['abs_work_dir'], 'tests/certs'),
+ # TestingMixin._download_and_extract_symbols() will set
+ # self.symbols_path when downloading/extracting.
+ 'symbols_path': self.symbols_path,
+ 'modules_dir': dirs['abs_modules_dir'],
+ 'installer_path': self.installer_path,
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'dm_trans': c['device_manager'],
+ # marionette options
+ 'address': c.get('marionette_address'),
+ 'gecko_log': os.path.join(dirs["abs_blob_upload_dir"], 'gecko.log'),
+ 'test_manifest': os.path.join(
+ dirs['abs_marionette_tests_dir'],
+ self.config.get('marionette_test_manifest', '')
+ ),
+ }
+ for option in self.config["suite_definitions"][self.test_suite]["options"]:
+ opt = option.split('=')[0]
+ # override configured chunk options with script args, if specified
+ if opt == '--this-chunk' and self.this_chunk is not None:
+ continue
+ if opt == '--total-chunks' and self.total_chunks is not None:
+ continue
+ cmd.extend([option % str_format_values])
+
+ if self.this_chunk is not None:
+ cmd.extend(['--this-chunk', self.this_chunk])
+ if self.total_chunks is not None:
+ cmd.extend(['--total-chunks', self.total_chunks])
+
+ try_options, try_tests = self.try_args(self.test_suite)
+ cmd.extend(try_options)
+ cmd.extend(self.query_tests_args(
+ self.config["suite_definitions"][self.test_suite].get("tests"),
+ None,
+ try_tests))
+
+ return cmd
+
+ def _get_repo_url(self, path):
+ """
+ Return a url for a file (typically a tooltool manifest) in this hg repo
+ and using this revision (or mozilla-central/default if repo/rev cannot
+ be determined).
+
+ :param path specifies the directory path to the file of interest.
+ """
+ if 'GECKO_HEAD_REPOSITORY' in os.environ and 'GECKO_HEAD_REV' in os.environ:
+ # probably taskcluster
+ repo = os.environ['GECKO_HEAD_REPOSITORY']
+ revision = os.environ['GECKO_HEAD_REV']
+ elif self.buildbot_config and 'properties' in self.buildbot_config:
+ # probably buildbot
+ repo = 'https://hg.mozilla.org/%s' % self.buildbot_config['properties']['repo_path']
+ revision = self.buildbot_config['properties']['revision']
+ else:
+ # something unexpected!
+ repo = 'https://hg.mozilla.org/mozilla-central'
+ revision = 'default'
+ self.warning('Unable to find repo/revision for manifest; using mozilla-central/default')
+ url = '%s/raw-file/%s/%s' % (
+ repo,
+ revision,
+ path)
+ return url
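For illustration, with GECKO_HEAD_REPOSITORY and GECKO_HEAD_REV set (the taskcluster case), this helper assembles a raw-file URL of the shape sketched below; the manifest path shown is hypothetical, the real one comes from the config.

    # Hypothetical inputs; only the URL shape is taken from the code above.
    repo = 'https://hg.mozilla.org/mozilla-central'   # from GECKO_HEAD_REPOSITORY
    revision = 'default'                              # from GECKO_HEAD_REV
    path = 'testing/config/tooltool-manifests/releng.manifest'
    url = '%s/raw-file/%s/%s' % (repo, revision, path)
    # url == 'https://hg.mozilla.org/mozilla-central/raw-file/default/testing/config/tooltool-manifests/releng.manifest'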
+
+ def _tooltool_fetch(self, url, dir):
+ c = self.config
+
+ manifest_path = self.download_file(
+ url,
+ file_name='releng.manifest',
+ parent_dir=dir
+ )
+
+ if not os.path.exists(manifest_path):
+ self.fatal("Could not retrieve manifest needed to retrieve "
+ "artifacts from %s" % manifest_path)
+
+ self.tooltool_fetch(manifest_path,
+ output_dir=dir,
+ cache=c.get("tooltool_cache", None))
+
+ ##########################################
+ ### Actions for AndroidEmulatorTest ###
+ ##########################################
+ def setup_avds(self):
+ '''
+ If tooltool cache mechanism is enabled, the cached version is used by
+ the fetch command. If the manifest includes an "unpack" field, tooltool
+ will unpack all compressed archives mentioned in the manifest.
+ '''
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ # FIXME
+ # Clobbering and re-unpacking would not be needed if we had a way to
+        # check whether the unpacked content already present matches the
+        # contents of the tarball
+ self.rmtree(dirs['abs_avds_dir'])
+ self.mkdir_p(dirs['abs_avds_dir'])
+ if 'avd_url' in c:
+ # Intended for experimental setups to evaluate an avd prior to
+ # tooltool deployment.
+ url = c['avd_url']
+ self.download_unpack(url, dirs['abs_avds_dir'])
+ else:
+ url = self._get_repo_url(c["tooltool_manifest_path"])
+ self._tooltool_fetch(url, dirs['abs_avds_dir'])
+
+ avd_home_dir = self.abs_dirs['abs_avds_dir']
+ if avd_home_dir != "/home/cltbld/.android":
+ # Modify the downloaded avds to point to the right directory.
+ cmd = [
+ 'bash', '-c',
+ 'sed -i "s|/home/cltbld/.android|%s|" %s/test-*.ini' %
+ (avd_home_dir, os.path.join(avd_home_dir, 'avd'))
+ ]
+ proc = ProcessHandler(cmd)
+ proc.run()
+ proc.wait()
+
+ def start_emulator(self):
+ '''
+ Starts the emulator
+ '''
+ if 'emulator_url' in self.config or 'emulator_manifest' in self.config or 'tools_manifest' in self.config:
+ self.install_emulator()
+
+ if not os.path.isfile(self.adb_path):
+ self.fatal("The adb binary '%s' is not a valid file!" % self.adb_path)
+ self._restart_adbd()
+
+ if not self.config.get("developer_mode"):
+ # We kill compiz because it sometimes prevents us from starting the emulator
+ self._kill_processes("compiz")
+ self._kill_processes("xpcshell")
+
+ # We add a symlink for libGL.so because the emulator dlopen()s it by that name
+ # even though the installed library on most systems without dev packages is
+ # libGL.so.1
+ linkfile = os.path.join(self.abs_dirs['abs_work_dir'], "libGL.so")
+ self.info("Attempting to establish symlink for %s" % linkfile)
+ try:
+ os.unlink(linkfile)
+ except OSError:
+ pass
+ for libdir in ["/usr/lib/x86_64-linux-gnu/mesa",
+ "/usr/lib/i386-linux-gnu/mesa",
+ "/usr/lib/mesa"]:
+ libfile = os.path.join(libdir, "libGL.so.1")
+ if os.path.exists(libfile):
+ self.info("Symlinking %s -> %s" % (linkfile, libfile))
+ self.mkdir_p(self.abs_dirs['abs_work_dir'])
+ os.symlink(libfile, linkfile)
+ break
+ self.emulator_proc = self._launch_emulator()
+
+ def verify_emulator(self):
+ '''
+ Check to see if the emulator can be contacted via adb and telnet.
+ If any communication attempt fails, kill the emulator, re-launch, and re-check.
+ '''
+ self.mkdir_p(self.query_abs_dirs()['abs_blob_upload_dir'])
+ max_restarts = 5
+ emulator_ok = self._retry(max_restarts, 10, self._verify_emulator_and_restart_on_fail, "Check emulator")
+ if not emulator_ok:
+ self.fatal('INFRA-ERROR: Unable to start emulator after %d attempts' % max_restarts)
+ # Start logcat for the emulator. The adb process runs until the
+ # corresponding emulator is killed. Output is written directly to
+ # the blobber upload directory so that it is uploaded automatically
+ # at the end of the job.
+ logcat_filename = 'logcat-%s.log' % self.emulator["device_id"]
+ logcat_path = os.path.join(self.abs_dirs['abs_blob_upload_dir'], logcat_filename)
+ logcat_cmd = '%s -s %s logcat -v threadtime Trace:S StrictMode:S ExchangeService:S > %s &' % \
+ (self.adb_path, self.emulator["device_id"], logcat_path)
+ self.info(logcat_cmd)
+ os.system(logcat_cmd)
+ # Get a post-boot emulator process list for diagnostics
+ ps_cmd = [self.adb_path, '-s', self.emulator["device_id"], 'shell', 'ps']
+ self._run_with_timeout(30, ps_cmd)
+
+ def download_and_extract(self):
+ """
+ Download and extract fennec APK, tests.zip, host utils, and robocop (if required).
+ """
+ super(AndroidEmulatorTest, self).download_and_extract(suite_categories=[self.test_suite])
+ dirs = self.query_abs_dirs()
+ if self.test_suite.startswith('robocop'):
+ robocop_url = self.installer_url[:self.installer_url.rfind('/')] + '/robocop.apk'
+ self.info("Downloading robocop...")
+ self.download_file(robocop_url, 'robocop.apk', dirs['abs_work_dir'], error_level=FATAL)
+ self.rmtree(dirs['abs_xre_dir'])
+ self.mkdir_p(dirs['abs_xre_dir'])
+ if self.config["hostutils_manifest_path"]:
+ url = self._get_repo_url(self.config["hostutils_manifest_path"])
+ self._tooltool_fetch(url, dirs['abs_xre_dir'])
+ for p in glob.glob(os.path.join(dirs['abs_xre_dir'], 'host-utils-*')):
+ if os.path.isdir(p) and os.path.isfile(os.path.join(p, 'xpcshell')):
+ self.xre_path = p
+ if not self.xre_path:
+ self.fatal("xre path not found in %s" % dirs['abs_xre_dir'])
+ else:
+ self.fatal("configure hostutils_manifest_path!")
+
+ def install(self):
+ """
+ Install APKs on the emulator
+ """
+ assert self.installer_path is not None, \
+ "Either add installer_path to the config or use --installer-path."
+ install_needed = self.config["suite_definitions"][self.test_suite].get("install")
+ if install_needed == False:
+ self.info("Skipping apk installation for %s" % self.test_suite)
+ return
+
+ self.sdk_level = self._run_with_timeout(30, [self.adb_path, '-s', self.emulator['device_id'],
+ 'shell', 'getprop', 'ro.build.version.sdk'])
+
+ # Install Fennec
+ install_ok = self._retry(3, 30, self._install_fennec_apk, "Install Fennec APK")
+ if not install_ok:
+ self.fatal('INFRA-ERROR: Failed to install %s on %s' % (self.installer_path, self.emulator["name"]))
+
+ # Install Robocop if required
+ if self.test_suite.startswith('robocop'):
+ install_ok = self._retry(3, 30, self._install_robocop_apk, "Install Robocop APK")
+ if not install_ok:
+ self.fatal('INFRA-ERROR: Failed to install %s on %s' % (self.robocop_path, self.emulator["name"]))
+
+ self.info("Finished installing apps for %s" % self.emulator["name"])
+
+ def run_tests(self):
+ """
+ Run the tests
+ """
+ cmd = self._build_command()
+
+ try:
+ cwd = self._query_tests_dir()
+ except:
+ self.fatal("Don't know how to run --test-suite '%s'!" % self.test_suite)
+ env = self.query_env()
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+
+ self.info("Running on %s the command %s" % (self.emulator["name"], subprocess.list2cmdline(cmd)))
+ self.info("##### %s log begins" % self.test_suite)
+
+ # TinderBoxPrintRe does not know about the '-debug' categories
+ aliases = {
+ 'reftest-debug': 'reftest',
+ 'jsreftest-debug': 'jsreftest',
+ 'crashtest-debug': 'crashtest',
+ }
+ suite_category = aliases.get(self.test_suite, self.test_suite)
+ parser = self.get_test_output_parser(
+ suite_category,
+ config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list)
+ self.run_command(cmd, cwd=cwd, env=env, output_parser=parser)
+ tbpl_status, log_level = parser.evaluate_parser(0)
+ parser.append_tinderboxprint_line(self.test_suite)
+
+ self.info("##### %s log ends" % self.test_suite)
+ self._dump_emulator_log()
+ self.buildbot_status(tbpl_status, level=log_level)
+
+ @PostScriptAction('run-tests')
+ def stop_emulator(self, action, success=None):
+ '''
+ Report emulator health, then make sure that the emulator has been stopped
+ '''
+ self._verify_emulator()
+ self._kill_processes(self.config["emulator_process_name"])
+
+ def upload_blobber_files(self):
+ '''
+ Override BlobUploadMixin.upload_blobber_files to ensure emulator is killed
+ first (if the emulator is still running, logcat may still be running, which
+ may lock the blob upload directory, causing a hang).
+ '''
+ if self.config.get('blob_upload_branch'):
+ # Except on interactive workers, we want the emulator to keep running
+ # after the script is finished. So only kill it if blobber would otherwise
+ # have run anyway (it doesn't get run on interactive workers).
+ self._kill_processes(self.config["emulator_process_name"])
+ super(AndroidEmulatorTest, self).upload_blobber_files()
+
+if __name__ == '__main__':
+ emulatorTest = AndroidEmulatorTest()
+ emulatorTest.run_and_exit()
diff --git a/testing/mozharness/scripts/bouncer_submitter.py b/testing/mozharness/scripts/bouncer_submitter.py
new file mode 100755
index 000000000..eaa43e851
--- /dev/null
+++ b/testing/mozharness/scripts/bouncer_submitter.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.bouncer.submitter import BouncerSubmitterMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+
+
+class BouncerSubmitter(BaseScript, PurgeMixin, BouncerSubmitterMixin, BuildbotMixin):
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "Specify source repo, e.g. releases/mozilla-beta",
+ }],
+ [["--revision"], {
+ "dest": "revision",
+ "help": "Source revision/tag used to fetch shipped-locales",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Current version",
+ }],
+ [["--previous-version"], {
+ "dest": "prev_versions",
+ "action": "extend",
+ "help": "Previous version(s)",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of version",
+ }],
+ [["--bouncer-api-prefix"], {
+ "dest": "bouncer-api-prefix",
+ "help": "Bouncer admin API URL prefix",
+ }],
+ [["--credentials-file"], {
+ "dest": "credentials_file",
+ "help": "File containing Bouncer credentials",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ # other stuff
+ all_actions=[
+ 'clobber',
+ 'download-shipped-locales',
+ 'submit',
+ ],
+ default_actions=[
+ 'clobber',
+ 'download-shipped-locales',
+ 'submit',
+ ],
+ config={
+ 'buildbot_json_path' : 'buildprops.json'
+ }
+ )
+ self.locales = None
+ self.credentials = None
+
+ def _pre_config_lock(self, rw_config):
+ super(BouncerSubmitter, self)._pre_config_lock(rw_config)
+
+        # override config options here with buildbot properties (as defined by taskcluster properties)
+ self.read_buildbot_config()
+
+        # check whether release promotion is set before overwriting these properties
+ if self.buildbot_config["properties"].get("release_promotion"):
+ for prop in ['product', 'version', 'build_number', 'revision', 'bouncer_submitter_config', ]:
+ if self.buildbot_config["properties"].get(prop):
+ self.info("Overriding %s with %s" % (prop, self.buildbot_config["properties"].get(prop)))
+ self.config[prop] = self.buildbot_config["properties"].get(prop)
+ if self.buildbot_config["properties"].get("partial_versions"):
+ self.config["prev_versions"] = self.buildbot_config["properties"].get("partial_versions").split(", ")
+
+ for opt in ["version", "credentials_file", "bouncer-api-prefix"]:
+ if opt not in self.config:
+ self.fatal("%s must be specified" % opt)
+ if self.need_shipped_locales():
+ for opt in ["shipped-locales-url", "repo", "revision"]:
+ if opt not in self.config:
+ self.fatal("%s must be specified" % opt)
+
+ def need_shipped_locales(self):
+ return any(e.get("add-locales") for e in
+ self.config["products"].values())
+
+ def query_shipped_locales_path(self):
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs["abs_work_dir"], "shipped-locales")
+
+ def download_shipped_locales(self):
+ if not self.need_shipped_locales():
+ self.info("No need to download shipped-locales")
+ return
+
+ replace_dict = {"revision": self.config["revision"],
+ "repo": self.config["repo"]}
+ url = self.config["shipped-locales-url"] % replace_dict
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs["abs_work_dir"])
+ if not self.download_file(url=url,
+ file_name=self.query_shipped_locales_path()):
+ self.fatal("Unable to fetch shipped-locales from %s" % url)
+ # populate the list
+ self.load_shipped_locales()
+
+ def load_shipped_locales(self):
+ if self.locales:
+ return self.locales
+ content = self.read_from_file(self.query_shipped_locales_path())
+ locales = []
+ for line in content.splitlines():
+ locale = line.split()[0]
+ if locale:
+ locales.append(locale)
+ self.locales = locales
+ return self.locales
+
+ def submit(self):
+ subs = {
+ "version": self.config["version"]
+ }
+ if self.config.get("build_number"):
+ subs["build_number"] = self.config["build_number"]
+
+ for product, pr_config in sorted(self.config["products"].items()):
+ product_name = pr_config["product-name"] % subs
+ if self.product_exists(product_name):
+ self.warning("Product %s already exists. Skipping..." %
+ product_name)
+ continue
+ self.info("Adding %s..." % product)
+ self.api_add_product(
+ product_name=product_name,
+ add_locales=pr_config.get("add-locales"),
+ ssl_only=pr_config.get("ssl-only"))
+ self.info("Adding paths...")
+ for platform, pl_config in sorted(pr_config["paths"].items()):
+ bouncer_platform = pl_config["bouncer-platform"]
+ path = pl_config["path"] % subs
+ self.info("%s (%s): %s" % (platform, bouncer_platform, path))
+ self.api_add_location(product_name, bouncer_platform, path)
+
+ # Add partial updates
+ if "partials" in self.config and self.config.get("prev_versions"):
+ self.submit_partials()
+
+ def submit_partials(self):
+ subs = {
+ "version": self.config["version"]
+ }
+ if self.config.get("build_number"):
+ subs["build_number"] = self.config["build_number"]
+ prev_versions = self.config.get("prev_versions")
+ for product, part_config in sorted(self.config["partials"].items()):
+ product_name_tmpl = part_config["product-name"]
+ for prev_version in prev_versions:
+ prev_version, prev_build_number = prev_version.split("build")
+ subs["prev_version"] = prev_version
+ subs["prev_build_number"] = prev_build_number
+ product_name = product_name_tmpl % subs
+ if self.product_exists(product_name):
+ self.warning("Product %s already exists. Skipping..." %
+ product_name)
+ continue
+ self.info("Adding partial updates for %s" % product_name)
+ self.api_add_product(
+ product_name=product_name,
+ add_locales=part_config.get("add-locales"),
+ ssl_only=part_config.get("ssl-only"))
+ for platform, pl_config in sorted(part_config["paths"].items()):
+ bouncer_platform = pl_config["bouncer-platform"]
+ path = pl_config["path"] % subs
+ self.info("%s (%s): %s" % (platform, bouncer_platform, path))
+ self.api_add_location(product_name, bouncer_platform, path)
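As a sketch of the version bookkeeping above, assuming previous versions arrive in the "partial_versions" format handled in _pre_config_lock (e.g. "45.0build3, 44.0build2"), the split used here behaves as follows; the product-name template is hypothetical:

    # Hypothetical values for illustration only.
    prev_version, prev_build_number = "45.0build3".split("build")
    # prev_version == "45.0", prev_build_number == "3"
    subs = {"version": "46.0", "prev_version": prev_version,
            "prev_build_number": prev_build_number}
    product_name = "Firefox-%(version)s-Partial-%(prev_version)sbuild%(prev_build_number)s" % subs
    # product_name == "Firefox-46.0-Partial-45.0build3"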
+
+
+if __name__ == '__main__':
+ myScript = BouncerSubmitter()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/configtest.py b/testing/mozharness/scripts/configtest.py
new file mode 100755
index 000000000..5db684f0a
--- /dev/null
+++ b/testing/mozharness/scripts/configtest.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""configtest.py
+
+Verify the .json and .py files in the configs/ directory are well-formed.
+Further tests to verify validity would be desirable.
+
+This is also a good example script to look at to understand mozharness.
+"""
+
+import os
+import pprint
+import sys
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+
+# ConfigTest {{{1
+class ConfigTest(BaseScript):
+ config_options = [[
+ ["--test-file",],
+ {"action": "extend",
+ "dest": "test_files",
+ "help": "Specify which config files to test"
+ }
+ ]]
+
+ def __init__(self, require_config_file=False):
+ self.config_files = []
+ BaseScript.__init__(self, config_options=self.config_options,
+ all_actions=['list-config-files',
+ 'test-json-configs',
+ 'test-python-configs',
+ 'summary',
+ ],
+ default_actions=['test-json-configs',
+ 'test-python-configs',
+ 'summary',
+ ],
+ require_config_file=require_config_file)
+
+ def query_config_files(self):
+ """This query method, much like others, caches its runtime
+ settings in self.VAR so we don't have to figure out config_files
+ multiple times.
+ """
+ if self.config_files:
+ return self.config_files
+ c = self.config
+ if 'test_files' in c:
+ self.config_files = c['test_files']
+ return self.config_files
+ self.debug("No --test-file(s) specified; defaulting to crawling the configs/ directory.")
+ config_files = []
+ for root, dirs, files in os.walk(os.path.join(sys.path[0], "..",
+ "configs")):
+ for name in files:
+ # Hardcode =P
+ if name.endswith(".json") or name.endswith(".py"):
+ if not name.startswith("test_malformed"):
+ config_files.append(os.path.join(root, name))
+ self.config_files = config_files
+ return self.config_files
+
+ def list_config_files(self):
+ """ Non-default action that is mainly here to demonstrate how
+ non-default actions work in a mozharness script.
+ """
+ config_files = self.query_config_files()
+ for config_file in config_files:
+ self.info(config_file)
+
+ def test_json_configs(self):
+ """ Currently only "is this well-formed json?"
+
+ """
+ config_files = self.query_config_files()
+ filecount = [0, 0]
+ for config_file in config_files:
+ if config_file.endswith(".json"):
+ filecount[0] += 1
+ self.info("Testing %s." % config_file)
+ contents = self.read_from_file(config_file, verbose=False)
+ try:
+ json.loads(contents)
+ except ValueError:
+ self.add_summary("%s is invalid json." % config_file,
+ level="error")
+ self.error(pprint.pformat(sys.exc_info()[1]))
+ else:
+ self.info("Good.")
+ filecount[1] += 1
+ if filecount[0]:
+ self.add_summary("%d of %d json config files were good." %
+ (filecount[1], filecount[0]))
+ else:
+ self.add_summary("No json config files to test.")
+
+ def test_python_configs(self):
+ """Currently only "will this give me a config dictionary?"
+
+ """
+ config_files = self.query_config_files()
+ filecount = [0, 0]
+ for config_file in config_files:
+ if config_file.endswith(".py"):
+ filecount[0] += 1
+ self.info("Testing %s." % config_file)
+ global_dict = {}
+ local_dict = {}
+ try:
+ execfile(config_file, global_dict, local_dict)
+ except:
+ self.add_summary("%s is invalid python." % config_file,
+ level="error")
+ self.error(pprint.pformat(sys.exc_info()[1]))
+ else:
+ if 'config' in local_dict and isinstance(local_dict['config'], dict):
+ self.info("Good.")
+ filecount[1] += 1
+ else:
+ self.add_summary("%s is valid python, but doesn't create a config dictionary." %
+ config_file, level="error")
+ if filecount[0]:
+ self.add_summary("%d of %d python config files were good." %
+ (filecount[1], filecount[0]))
+ else:
+ self.add_summary("No python config files to test.")
+
+# __main__ {{{1
+if __name__ == '__main__':
+ config_test = ConfigTest()
+ config_test.run_and_exit()
diff --git a/testing/mozharness/scripts/desktop_l10n.py b/testing/mozharness/scripts/desktop_l10n.py
new file mode 100755
index 000000000..0626ce35b
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_l10n.py
@@ -0,0 +1,1152 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_l10n.py
+
+This script manages Desktop repacks for nightly builds.
+"""
+import os
+import re
+import sys
+import time
+import shlex
+import subprocess
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList, MakefileErrorList
+from mozharness.base.script import BaseScript
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.building.buildbase import MakeUploadOutputParser
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.mar import MarMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.mozilla.taskcluster_helper import Taskcluster
+from mozharness.base.python import VirtualenvMixin
+from mozharness.mozilla.mock import ERROR_MSGS
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+
+# needed by _map
+SUCCESS = 0
+FAILURE = 1
+
+SUCCESS_STR = "Success"
+FAILURE_STR = "Failed"
+
+# when running get_output_from_command, pymake has some extra output
+# that needs to be filtered out
+PyMakeIgnoreList = [
+ re.compile(r'''.*make\.py(?:\[\d+\])?: Entering directory'''),
+ re.compile(r'''.*make\.py(?:\[\d+\])?: Leaving directory'''),
+]
+
+
+# mandatory configuration options; without them, this script will not work
+# it's a list of values that are already known before starting a build
+configuration_tokens = ('branch',
+ 'platform',
+ 'update_platform',
+ 'update_channel',
+ 'ssh_key_dir',
+ 'stage_product',
+ 'upload_environment',
+ )
+# some other values such as "%(version)s", "%(buildid)s", ...
+# are defined at run time and they cannot be enforced in the _pre_config_lock
+# phase
+runtime_config_tokens = ('buildid', 'version', 'locale', 'from_buildid',
+ 'abs_objdir', 'abs_merge_dir', 'revision',
+ 'to_buildid', 'en_us_binary_url', 'mar_tools_url',
+ 'post_upload_extra', 'who')
+
+# DesktopSingleLocale {{{1
+class DesktopSingleLocale(LocalesMixin, ReleaseMixin, MockMixin, BuildbotMixin,
+ VCSMixin, SigningMixin, PurgeMixin, BaseScript,
+ BalrogMixin, MarMixin, VirtualenvMixin, TransferMixin):
+ """Manages desktop repacks"""
+ config_options = [[
+ ['--balrog-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the balrog configuration file"}
+ ], [
+ ['--branch-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the branch configuration file"}
+ ], [
+ ['--environment-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the environment (staging, production, ...) configuration file"}
+ ], [
+ ['--platform-config', ],
+ {"action": "extend",
+ "dest": "config_files",
+ "type": "string",
+ "help": "Specify the platform configuration file"}
+ ], [
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to sign and update. Optionally pass"
+ " revision separated by colon, en-GB:default."}
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a file to determine which locales to sign and update"}
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"}
+ ], [
+ ['--revision', ],
+ {"action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "Override the gecko revision to use (otherwise use buildbot supplied"
+ " value, or en-US revision) "}
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"}
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"}
+ ], [
+ ['--this-chunk', ],
+ {"action": "store",
+ "dest": "this_locale_chunk",
+ "type": "int",
+ "help": "Specify which chunk of locales to run"}
+ ], [
+ ['--total-chunks', ],
+ {"action": "store",
+ "dest": "total_locale_chunks",
+ "type": "int",
+ "help": "Specify the total number of chunks of locales"}
+ ], [
+ ['--en-us-installer-url', ],
+ {"action": "store",
+ "dest": "en_us_installer_url",
+ "type": "string",
+ "help": "Specify the url of the en-us binary"}
+ ], [
+ ["--disable-mock"], {
+ "dest": "disable_mock",
+ "action": "store_true",
+ "help": "do not run under mock despite what gecko-config says"}
+ ]]
+
+ def __init__(self, require_config_file=True):
+ # fxbuild style:
+ buildscript_kwargs = {
+ 'all_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "taskcluster-upload",
+ "funsize-props",
+ "submit-to-balrog",
+ "summary",
+ ],
+ 'config': {
+ "buildbot_json_path": "buildprops.json",
+ "ignore_locales": ["en-US"],
+ "locales_dir": "browser/locales",
+ "update_mar_dir": "dist/update",
+ "buildid_section": "App",
+ "buildid_option": "BuildID",
+ "application_ini": "application.ini",
+ "log_name": "single_locale",
+ "clobber_file": 'CLOBBER',
+ "appName": "Firefox",
+ "hashType": "sha512",
+ "taskcluster_credentials_file": "oauth.txt",
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ },
+ }
+ #
+
+ LocalesMixin.__init__(self)
+ BaseScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ **buildscript_kwargs
+ )
+
+ self.buildid = None
+ self.make_ident_output = None
+ self.bootstrap_env = None
+ self.upload_env = None
+ self.revision = None
+ self.enUS_revision = None
+ self.version = None
+ self.upload_urls = {}
+ self.locales_property = {}
+ self.package_urls = {}
+ self.pushdate = None
+ # upload_files is a dictionary of files to upload, keyed by locale.
+ self.upload_files = {}
+
+ if 'mock_target' in self.config:
+ self.enable_mock()
+
+ def _pre_config_lock(self, rw_config):
+ """replaces 'configuration_tokens' with their values, before the
+ configuration gets locked. If some of the configuration_tokens
+ are not present, stops the execution of the script"""
+        # since values such as branch and platform are mandatory, we can replace
+        # them in the configuration before it is locked down
+ # mandatory tokens
+ for token in configuration_tokens:
+ if token not in self.config:
+ self.fatal('No %s in configuration!' % token)
+
+ # all the important tokens are present in our configuration
+ for token in configuration_tokens:
+ # token_string '%(branch)s'
+ token_string = ''.join(('%(', token, ')s'))
+ # token_value => ash
+ token_value = self.config[token]
+ for element in self.config:
+ # old_value => https://hg.mozilla.org/projects/%(branch)s
+ old_value = self.config[element]
+ # new_value => https://hg.mozilla.org/projects/ash
+ new_value = self.__detokenise_element(self.config[element],
+ token_string,
+ token_value)
+ if new_value and new_value != old_value:
+ msg = "%s: replacing %s with %s" % (element,
+ old_value,
+ new_value)
+ self.debug(msg)
+ self.config[element] = new_value
+
+ # now, only runtime_config_tokens should be present in config
+        # we should parse self.config and fail if we spot any other token
+ tokens_left = set(self._get_configuration_tokens(self.config))
+ unknown_tokens = set(tokens_left) - set(runtime_config_tokens)
+ if unknown_tokens:
+ msg = ['unknown tokens in configuration:']
+ for t in unknown_tokens:
+ msg.append(t)
+ self.fatal(' '.join(msg))
+ self.info('configuration looks ok')
+
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['mar_tools_url']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ def _get_configuration_tokens(self, iterable):
+ """gets a list of tokens in iterable"""
+ regex = re.compile('%\(\w+\)s')
+ results = []
+ try:
+ for element in iterable:
+ if isinstance(iterable, str):
+ # this is a string, look for tokens
+ # self.debug("{0}".format(re.findall(regex, element)))
+ tokens = re.findall(regex, iterable)
+ for token in tokens:
+ # clean %(branch)s => branch
+ # remove %(
+ token_name = token.partition('%(')[2]
+ # remove )s
+ token_name = token_name.partition(')s')[0]
+ results.append(token_name)
+ break
+
+ elif isinstance(iterable, (list, tuple)):
+ results.extend(self._get_configuration_tokens(element))
+
+ elif isinstance(iterable, dict):
+ results.extend(self._get_configuration_tokens(iterable[element]))
+
+ except TypeError:
+ # element is a int/float/..., nothing to do here
+ pass
+
+ # remove duplicates, and return results
+
+ return list(set(results))
+
+ def __detokenise_element(self, config_option, token, value):
+        """reads config_option and returns a version of the same config_option
+           with token replaced by value, recursively"""
+ # config_option is a string, let's replace token with value
+ if isinstance(config_option, str):
+ # if token does not appear in this string,
+ # nothing happens and the original value is returned
+ return config_option.replace(token, value)
+ # it's a dictionary
+ elif isinstance(config_option, dict):
+ # replace token for each element of this dictionary
+ for element in config_option:
+ config_option[element] = self.__detokenise_element(
+ config_option[element], token, value)
+ return config_option
+ # it's a list
+ elif isinstance(config_option, list):
+ # create a new list and append the replaced elements
+ new_list = []
+ for element in config_option:
+ new_list.append(self.__detokenise_element(element, token, value))
+ return new_list
+ elif isinstance(config_option, tuple):
+ # create a new list and append the replaced elements
+ new_list = []
+ for element in config_option:
+ new_list.append(self.__detokenise_element(element, token, value))
+ return tuple(new_list)
+ else:
+ # everything else, bool, number, ...
+ return config_option
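A short worked example of the detokenisation performed here, reusing the sample values from the comments in _pre_config_lock above:

    # For the 'branch' token with value 'ash' (values from the comments above):
    token_string = '%(branch)s'
    token_value = 'ash'
    option = 'https://hg.mozilla.org/projects/%(branch)s'
    # On a plain string the method reduces to str.replace:
    option.replace(token_string, token_value)
    # -> 'https://hg.mozilla.org/projects/ash'
    # Dicts, lists and tuples are walked recursively, applying the same
    # replacement to every string leaf.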
+
+ # Helper methods {{{2
+ def query_bootstrap_env(self):
+ """returns the env for repacks"""
+ if self.bootstrap_env:
+ return self.bootstrap_env
+ config = self.config
+ replace_dict = self.query_abs_dirs()
+
+ replace_dict['en_us_binary_url'] = config.get('en_us_binary_url')
+ self.read_buildbot_config()
+ # Override en_us_binary_url if packageUrl is passed as a property from
+ # the en-US build
+ if self.buildbot_config["properties"].get("packageUrl"):
+ packageUrl = self.buildbot_config["properties"]["packageUrl"]
+ # trim off the filename, the build system wants a directory
+ packageUrl = packageUrl.rsplit('/', 1)[0]
+ self.info("Overriding en_us_binary_url with %s" % packageUrl)
+ replace_dict['en_us_binary_url'] = str(packageUrl)
+ # Override en_us_binary_url if passed as a buildbot property
+ if self.buildbot_config["properties"].get("en_us_binary_url"):
+ self.info("Overriding en_us_binary_url with %s" %
+ self.buildbot_config["properties"]["en_us_binary_url"])
+ replace_dict['en_us_binary_url'] = \
+ str(self.buildbot_config["properties"]["en_us_binary_url"])
+ bootstrap_env = self.query_env(partial_env=config.get("bootstrap_env"),
+ replace_dict=replace_dict)
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ sign_cmd = self.query_moz_sign_cmd(formats=None)
+ sign_cmd = subprocess.list2cmdline(sign_cmd)
+ # windows fix
+ bootstrap_env['MOZ_SIGN_CMD'] = sign_cmd.replace('\\', '\\\\\\\\')
+ for binary in self._mar_binaries():
+ # "mar -> MAR" and 'mar.exe -> MAR' (windows)
+ name = binary.replace('.exe', '')
+ name = name.upper()
+ binary_path = os.path.join(self._mar_tool_dir(), binary)
+ # windows fix...
+ if binary.endswith('.exe'):
+ binary_path = binary_path.replace('\\', '\\\\\\\\')
+ bootstrap_env[name] = binary_path
+ if 'LOCALE_MERGEDIR' in bootstrap_env:
+ # windows fix
+ bootstrap_env['LOCALE_MERGEDIR'] = bootstrap_env['LOCALE_MERGEDIR'].replace('\\', '\\\\\\\\')
+ if self.query_is_nightly():
+ bootstrap_env["IS_NIGHTLY"] = "yes"
+ self.bootstrap_env = bootstrap_env
+ return self.bootstrap_env
+
+ def _query_upload_env(self):
+ """returns the environment used for the upload step"""
+ if self.upload_env:
+ return self.upload_env
+ config = self.config
+
+ replace_dict = {
+ 'buildid': self._query_buildid(),
+ 'version': self.query_version(),
+ 'post_upload_extra': ' '.join(config.get('post_upload_extra', [])),
+ 'upload_environment': config['upload_environment'],
+ }
+ if config['branch'] == 'try':
+ replace_dict.update({
+ 'who': self.query_who(),
+ 'revision': self._query_revision(),
+ })
+ upload_env = self.query_env(partial_env=config.get("upload_env"),
+ replace_dict=replace_dict)
+        # check if there are any extra options from the platform configuration
+ # and append them to the env
+
+ if 'upload_env_extra' in config:
+ for extra in config['upload_env_extra']:
+ upload_env[extra] = config['upload_env_extra'][extra]
+
+ self.upload_env = upload_env
+ return self.upload_env
+
+ def query_l10n_env(self):
+ l10n_env = self._query_upload_env().copy()
+ # both upload_env and bootstrap_env define MOZ_SIGN_CMD
+ # the one from upload_env is taken from os.environ, the one from
+ # bootstrap_env is set with query_moz_sign_cmd()
+        # we need to use the value provided by query_moz_sign_cmd or make upload
+ # will fail (signtool.py path is wrong)
+ l10n_env.update(self.query_bootstrap_env())
+ return l10n_env
+
+ def _query_make_ident_output(self):
+ """Get |make ident| output from the objdir.
+ Only valid after setup is run.
+ """
+ if self.make_ident_output:
+ return self.make_ident_output
+ dirs = self.query_abs_dirs()
+ self.make_ident_output = self._get_output_from_make(
+ target=["ident"],
+ cwd=dirs['abs_locales_dir'],
+ env=self.query_bootstrap_env())
+ return self.make_ident_output
+
+ def _query_buildid(self):
+ """Get buildid from the objdir.
+ Only valid after setup is run.
+ """
+ if self.buildid:
+ return self.buildid
+ r = re.compile(r"buildid (\d+)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ match = r.match(line)
+ if match:
+ self.buildid = match.groups()[0]
+ return self.buildid
+
+ def _query_revision(self):
+ """ Get the gecko revision in this order of precedence
+ * cached value
+ * command line arg --revision (development, taskcluster)
+ * buildbot properties (try with buildbot forced build)
+ * buildbot change (try with buildbot scheduler)
+ * from the en-US build (m-c & m-a)
+
+ This will fail the last case if the build hasn't been pulled yet.
+ """
+ if self.revision:
+ return self.revision
+
+ self.read_buildbot_config()
+ config = self.config
+ revision = None
+ if config.get("revision"):
+ revision = config["revision"]
+ elif 'revision' in self.buildbot_properties:
+ revision = self.buildbot_properties['revision']
+ elif (self.buildbot_config and
+ self.buildbot_config.get('sourcestamp', {}).get('revision')):
+ revision = self.buildbot_config['sourcestamp']['revision']
+ elif self.buildbot_config and self.buildbot_config.get('revision'):
+ revision = self.buildbot_config['revision']
+ elif config.get("update_gecko_source_to_enUS", True):
+ revision = self._query_enUS_revision()
+
+ if not revision:
+ self.fatal("Can't determine revision!")
+ self.revision = str(revision)
+ return self.revision
+
+ def _query_enUS_revision(self):
+ """Get revision from the objdir.
+ Only valid after setup is run.
+ """
+ if self.enUS_revision:
+ return self.enUS_revision
+ r = re.compile(r"^(gecko|fx)_revision ([0-9a-f]+\+?)$")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ match = r.match(line)
+ if match:
+ self.enUS_revision = match.groups()[1]
+ return self.enUS_revision
+
+ def _query_make_variable(self, variable, make_args=None,
+ exclude_lines=PyMakeIgnoreList):
+ """returns the value of make echo-variable-<variable>
+ it accepts extra make arguments (make_args)
+ it also accepts an exclude_lines filter for the output;
+ exclude_lines defaults to PyMakeIgnoreList because
+ on windows, pymake writes extra output lines that need
+ to be filtered out.
+ """
+ dirs = self.query_abs_dirs()
+ make_args = make_args or []
+ exclude_lines = exclude_lines or []
+ target = ["echo-variable-%s" % variable] + make_args
+ cwd = dirs['abs_locales_dir']
+ raw_output = self._get_output_from_make(target, cwd=cwd,
+ env=self.query_bootstrap_env())
+ # we want to log all the messages from make/pymake and
+ # exclude some messages from the output ("Entering directory...")
+ output = []
+ for line in raw_output.split("\n"):
+ discard = False
+ for element in exclude_lines:
+ if element.match(line):
+ discard = True
+ continue
+ if not discard:
+ output.append(line.strip())
+ output = " ".join(output).strip()
+ self.info('echo-variable-%s: %s' % (variable, output))
+ return output
+
+ def query_version(self):
+ """Gets the version from the objdir.
+ Only valid after setup is run."""
+ if self.version:
+ return self.version
+ config = self.config
+ if config.get('release_config_file'):
+ release_config = self.query_release_config()
+ self.version = release_config['version']
+ else:
+ self.version = self._query_make_variable("MOZ_APP_VERSION")
+ return self.version
+
+ def _map(self, func, items):
+ """runs func for any item in items, calls the add_failure() for each
+ error. It assumes that function returns 0 when successful.
+ returns a two element tuple with (success_count, total_count)"""
+ success_count = 0
+ total_count = len(items)
+ name = func.__name__
+ for item in items:
+ result = func(item)
+ if result == SUCCESS:
+ # success!
+ success_count += 1
+ else:
+ # func failed...
+ message = 'failure: %s(%s)' % (name, item)
+ self._add_failure(item, message)
+ return (success_count, total_count)
+
+ def _add_failure(self, locale, message, **kwargs):
+ """marks current step as failed"""
+ self.locales_property[locale] = FAILURE_STR
+ prop_key = "%s_failure" % locale
+ prop_value = self.query_buildbot_property(prop_key)
+ if prop_value:
+ prop_value = "%s %s" % (prop_value, message)
+ else:
+ prop_value = message
+ self.set_buildbot_property(prop_key, prop_value, write_to_file=True)
+ BaseScript.add_failure(self, locale, message=message, **kwargs)
+
+ def query_failed_locales(self):
+ return [l for l, res in self.locales_property.items() if
+ res == FAILURE_STR]
+
+ def summary(self):
+ """generates a summary"""
+ BaseScript.summary(self)
+ # TODO we probably want to make this configurable on/off
+ locales = self.query_locales()
+ for locale in locales:
+ self.locales_property.setdefault(locale, SUCCESS_STR)
+ self.set_buildbot_property("locales",
+ json.dumps(self.locales_property),
+ write_to_file=True)
+
+ # Actions {{{2
+ def clobber(self):
+ """clobber"""
+ dirs = self.query_abs_dirs()
+ clobber_dirs = (dirs['abs_objdir'], dirs['abs_upload_dir'])
+ PurgeMixin.clobber(self, always_clobber_dirs=clobber_dirs)
+
+ def pull(self):
+ """pulls source code"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ # replace dictionary for repos
+ # we need to interpolate some values:
+ # branch, branch_repo
+ # and user_repo_override if exists
+ replace_dict = {}
+ if config.get("user_repo_override"):
+ replace_dict['user_repo_override'] = config['user_repo_override']
+ # this is OK so early because we get it from buildbot, or
+ # the command line for local dev
+ replace_dict['revision'] = self._query_revision()
+
+ for repository in config['repos']:
+ current_repo = {}
+ for key, value in repository.iteritems():
+ try:
+ current_repo[key] = value % replace_dict
+ except TypeError:
+ # pass through non-interpolables, like booleans
+ current_repo[key] = value
+ except KeyError:
+ self.error('not all the values in "{0}" can be replaced. Check your configuration'.format(value))
+ raise
+ repos.append(current_repo)
+ self.info("repositories: %s" % repos)
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=config.get('tag_override'))
+
+ def clone_locales(self):
+ self.pull_locale_source()
+
+ def setup(self):
+ """setup step"""
+ dirs = self.query_abs_dirs()
+ self._run_tooltool()
+ self._copy_mozconfig()
+ self._mach_configure()
+ self._run_make_in_config_dir()
+ self.make_wget_en_US()
+ self.make_unpack_en_US()
+ self.download_mar_tools()
+
+ # on try we want the source we already have, otherwise update to the
+ # same as the en-US binary
+ if self.config.get("update_gecko_source_to_enUS", True):
+ revision = self._query_enUS_revision()
+ # TODO do this through VCSMixin instead of hardcoding hg
+ # self.update(dest=dirs["abs_mozilla_dir"], revision=revision)
+ hg = self.query_exe("hg")
+ self.run_command([hg, "update", "-r", revision],
+ cwd=dirs["abs_mozilla_dir"],
+ env=self.query_bootstrap_env(),
+ error_list=BaseErrorList,
+ halt_on_failure=True, fatal_exit_code=3)
+ # if checkout updates CLOBBER file with a newer timestamp,
+ # next make -f client.mk configure will delete archives
+ # downloaded with make wget_en_US, so just touch CLOBBER file
+ _clobber_file = self._clobber_file()
+ if os.path.exists(_clobber_file):
+ self._touch_file(_clobber_file)
+ # and again...
+ # thanks to the last hg update, we may be on a different firefox 'version'
+ # than the one on default,
+ self._mach_configure()
+ self._run_make_in_config_dir()
+
+ def _run_make_in_config_dir(self):
+ """this step creates nsinstall, needed my make_wget_en_US()
+ """
+ dirs = self.query_abs_dirs()
+ config_dir = os.path.join(dirs['abs_objdir'], 'config')
+ env = self.query_bootstrap_env()
+ return self._make(target=['export'], cwd=config_dir, env=env)
+
+ def _clobber_file(self):
+ """returns the full path of the clobber file"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_objdir'], config.get('clobber_file'))
+
+ def _copy_mozconfig(self):
+ """copies the mozconfig file into abs_mozilla_dir/.mozconfig
+ and logs the content
+ """
+ config = self.config
+ dirs = self.query_abs_dirs()
+ mozconfig = config['mozconfig']
+ src = os.path.join(dirs['abs_work_dir'], mozconfig)
+ dst = os.path.join(dirs['abs_mozilla_dir'], '.mozconfig')
+ self.copyfile(src, dst)
+ self.read_from_file(dst, verbose=True)
+
+ def _mach(self, target, env, halt_on_failure=True, output_parser=None):
+ dirs = self.query_abs_dirs()
+ mach = self._get_mach_executable()
+ return self.run_command(mach + target,
+ halt_on_failure=halt_on_failure,
+ env=env,
+ cwd=dirs['abs_mozilla_dir'],
+ output_parser=output_parser)
+
+ def _mach_configure(self):
+ """calls mach configure"""
+ env = self.query_bootstrap_env()
+ target = ["configure"]
+ return self._mach(target=target, env=env)
+
+ def _get_mach_executable(self):
+ python = self.query_exe('python2.7')
+ return [python, 'mach']
+
+ def _get_make_executable(self):
+ config = self.config
+ dirs = self.query_abs_dirs()
+ if config.get('enable_mozmake'): # e.g. windows
+ make = r"/".join([dirs['abs_mozilla_dir'], 'mozmake.exe'])
+ # mysterious subprocess errors, let's try to fix this path...
+ make = make.replace('\\', '/')
+ make = [make]
+ else:
+ make = ['make']
+ return make
+
+ def _make(self, target, cwd, env, error_list=MakefileErrorList,
+ halt_on_failure=True, output_parser=None):
+ """Runs make. Returns the exit code"""
+ make = self._get_make_executable()
+ if target:
+ make = make + target
+ return self.run_command(make,
+ cwd=cwd,
+ env=env,
+ error_list=error_list,
+ halt_on_failure=halt_on_failure,
+ output_parser=output_parser)
+
+ def _get_output_from_make(self, target, cwd, env, halt_on_failure=True, ignore_errors=False):
+ """runs make and returns the output of the command"""
+ make = self._get_make_executable()
+ return self.get_output_from_command(make + target,
+ cwd=cwd,
+ env=env,
+ silent=True,
+ halt_on_failure=halt_on_failure,
+ ignore_errors=ignore_errors)
+
+ def make_unpack_en_US(self):
+ """wrapper for make unpack"""
+ config = self.config
+ dirs = self.query_abs_dirs()
+ env = self.query_bootstrap_env()
+ cwd = os.path.join(dirs['abs_objdir'], config['locales_dir'])
+ return self._make(target=["unpack"], cwd=cwd, env=env)
+
+ def make_wget_en_US(self):
+ """wrapper for make wget-en-US"""
+ env = self.query_bootstrap_env()
+ dirs = self.query_abs_dirs()
+ cwd = dirs['abs_locales_dir']
+ return self._make(target=["wget-en-US"], cwd=cwd, env=env)
+
+ def make_upload(self, locale):
+ """wrapper for make upload command"""
+ config = self.config
+ env = self.query_l10n_env()
+ dirs = self.query_abs_dirs()
+ buildid = self._query_buildid()
+ replace_dict = {
+ 'buildid': buildid,
+ 'branch': config['branch']
+ }
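+ # base_post_upload_cmd, when present, is a template interpolated with the
+ # dict above, e.g. (hypothetical):
+ # "post_upload.py -b %(branch)s-l10n -i %(buildid)s --release-to-latest"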
+ try:
+ env['POST_UPLOAD_CMD'] = config['base_post_upload_cmd'] % replace_dict
+ except KeyError:
+ # no base_post_upload_cmd in configuration, just skip it
+ pass
+ target = ['upload', 'AB_CD=%s' % (locale)]
+ cwd = dirs['abs_locales_dir']
+ parser = MakeUploadOutputParser(config=self.config,
+ log_obj=self.log_obj)
+ retval = self._make(target=target, cwd=cwd, env=env,
+ halt_on_failure=False, output_parser=parser)
+ if locale not in self.package_urls:
+ self.package_urls[locale] = {}
+ self.package_urls[locale].update(parser.matches)
+ if retval == SUCCESS:
+ self.info('Upload successful (%s)' % locale)
+ ret = SUCCESS
+ else:
+ self.error('failed to upload %s' % locale)
+ ret = FAILURE
+ return ret
+
+ def set_upload_files(self, locale):
+ # The tree doesn't have a good way of exporting the list of files
+ # created during locale generation, but we can grab them by echoing the
+ # UPLOAD_FILES variable for each locale.
+ env = self.query_l10n_env()
+ target = ['echo-variable-UPLOAD_FILES', 'echo-variable-CHECKSUM_FILES',
+ 'AB_CD=%s' % locale]
+ dirs = self.query_abs_dirs()
+ cwd = dirs['abs_locales_dir']
+ # Bug 1242771 - echo-variable-UPLOAD_FILES via mozharness fails when stderr is found
+ # we should ignore stderr as unfortunately it's expected when parsing for values
+ output = self._get_output_from_make(target=target, cwd=cwd, env=env,
+ ignore_errors=True)
+ self.info('UPLOAD_FILES is "%s"' % output)
+ files = shlex.split(output)
+ if not files:
+ self.error('failed to get upload file list for locale %s' % locale)
+ return FAILURE
+
+ self.upload_files[locale] = [
+ os.path.abspath(os.path.join(cwd, f)) for f in files
+ ]
+ return SUCCESS
+
+ def make_installers(self, locale):
+ """wrapper for make installers-(locale)"""
+ env = self.query_l10n_env()
+ self._copy_mozconfig()
+ dirs = self.query_abs_dirs()
+ cwd = os.path.join(dirs['abs_locales_dir'])
+ target = ["installers-%s" % locale,
+ "LOCALE_MERGEDIR=%s" % env["LOCALE_MERGEDIR"], ]
+ return self._make(target=target, cwd=cwd,
+ env=env, halt_on_failure=False)
+
+ def repack_locale(self, locale):
+ """wraps the logic for compare locale, make installers and generating
+ complete updates."""
+
+ if self.run_compare_locales(locale) != SUCCESS:
+ self.error("compare locale %s failed" % (locale))
+ return FAILURE
+
+ # compare locale succeeded, run make installers
+ if self.make_installers(locale) != SUCCESS:
+ self.error("make installers-%s failed" % (locale))
+ return FAILURE
+
+ # now try to upload the artifacts
+ if self.make_upload(locale):
+ self.error("make upload for locale %s failed!" % (locale))
+ return FAILURE
+
+ # set_upload_files() should be called after make upload, to make sure
+ # we have all files in place (checksums, etc)
+ if self.set_upload_files(locale):
+ self.error("failed to get list of files to upload for locale %s" % locale)
+ return FAILURE
+
+ return SUCCESS
+
+ def repack(self):
+ """creates the repacks and udpates"""
+ self._map(self.repack_locale, self.query_locales())
+
+ def _query_objdir(self):
+ """returns objdir name from configuration"""
+ return self.config['objdir']
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopSingleLocale, self).query_abs_dirs()
+ for directory in abs_dirs:
+ value = abs_dirs[directory]
+ abs_dirs[directory] = value
+ dirs = {}
+ dirs['abs_tools_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tools')
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def submit_to_balrog(self):
+ """submit to balrog"""
+ if not self.config.get("balrog_servers"):
+ self.info("balrog_servers not set; skipping balrog submission.")
+ return
+ self.info("Reading buildbot build properties...")
+ self.read_buildbot_config()
+ # get platform, appName and hashType from configuration
+ # common values across different locales
+ config = self.config
+ platform = config["platform"]
+ hashType = config['hashType']
+ appName = config['appName']
+ branch = config['branch']
+ # values from configuration
+ self.set_buildbot_property("branch", branch)
+ self.set_buildbot_property("appName", appName)
+ # it's hardcoded to sha512 in balrog.py
+ self.set_buildbot_property("hashType", hashType)
+ self.set_buildbot_property("platform", platform)
+ # values common to the current repacks
+ self.set_buildbot_property("buildid", self._query_buildid())
+ self.set_buildbot_property("appVersion", self.query_version())
+
+ # submit complete mar to balrog
+ # clean up buildbot_properties
+ self._map(self.submit_repack_to_balrog, self.query_locales())
+
+ def submit_repack_to_balrog(self, locale):
+ """submit a single locale to balrog"""
+ # check if locale has been uploaded, if not just return a FAILURE
+ if locale not in self.package_urls:
+ self.error("%s is not present in package_urls. Did you run make upload?" % locale)
+ return FAILURE
+
+ if not self.query_is_nightly():
+ # remove this check when we extend this script to non-nightly builds
+ self.fatal("Not a nightly build")
+ return FAILURE
+
+ # complete mar file
+ c_marfile = self._query_complete_mar_filename(locale)
+ c_mar_url = self._query_complete_mar_url(locale)
+
+ # Set other necessary properties for Balrog submission. None need to
+ # be passed back to buildbot, so we won't write them to the properties
+ # files
+ # Locale is hardcoded to en-US, for silly reasons
+ # The Balrog submitter translates this platform into a build target
+ # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+ self.set_buildbot_property("completeMarSize", self.query_filesize(c_marfile))
+ self.set_buildbot_property("completeMarHash", self.query_sha512sum(c_marfile))
+ self.set_buildbot_property("completeMarUrl", c_mar_url)
+ self.set_buildbot_property("locale", locale)
+ if "partialInfo" in self.package_urls[locale]:
+ self.set_buildbot_property("partialInfo",
+ self.package_urls[locale]["partialInfo"])
+ ret = FAILURE
+ try:
+ result = self.submit_balrog_updates()
+ self.info("balrog return code: %s" % (result))
+ if result == 0:
+ ret = SUCCESS
+ except Exception as error:
+ self.error("submit repack to balrog failed: %s" % (str(error)))
+ return ret
+
+ def _query_complete_mar_filename(self, locale):
+ """returns the full path to a localized complete mar file"""
+ config = self.config
+ version = self.query_version()
+ complete_mar_name = config['localized_mar'] % {'version': version,
+ 'locale': locale}
+ return os.path.join(self._update_mar_dir(), complete_mar_name)
+
+ def _query_complete_mar_url(self, locale):
+ """returns the complete mar url taken from self.package_urls[locale]
+ this value is available only after make_upload"""
+ if "complete_mar_url" in self.config:
+ return self.config["complete_mar_url"]
+ if "completeMarUrl" in self.package_urls[locale]:
+ return self.package_urls[locale]["completeMarUrl"]
+ # url = self.config.get("update", {}).get("mar_base_url")
+ # if url:
+ # url += os.path.basename(self.query_marfile_path())
+ # return url.format(branch=self.query_branch())
+ self.fatal("Couldn't find complete mar url in config or package_urls")
+
+ def _update_mar_dir(self):
+ """returns the full path of the update/ directory"""
+ return self._mar_dir('update_mar_dir')
+
+ def _mar_binaries(self):
+ """returns a tuple with mar and mbsdiff paths"""
+ config = self.config
+ return (config['mar'], config['mbsdiff'])
+
+ def _mar_dir(self, dirname):
+ """returns the full path of dirname;
+ dirname is an entry in configuration"""
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs['abs_objdir'], self.config[dirname])
+
+ # TODO: replace with ToolToolMixin
+ def _get_tooltool_auth_file(self):
+ # set the default authentication file based on platform; this
+ # corresponds to where puppet puts the token
+ if 'tooltool_authentication_file' in self.config:
+ fn = self.config['tooltool_authentication_file']
+ elif self._is_windows():
+ fn = r'c:\builds\relengapi.tok'
+ else:
+ fn = '/builds/relengapi.tok'
+
+ # if the file doesn't exist, don't pass it to tooltool (it will just
+ # fail). In taskcluster, this will work OK as the relengapi-proxy will
+ # take care of auth. Everywhere else, we'll get auth failures if
+ # necessary.
+ if os.path.exists(fn):
+ return fn
+
+ def _run_tooltool(self):
+ config = self.config
+ dirs = self.query_abs_dirs()
+ if not config.get('tooltool_manifest_src'):
+ return self.warning(ERROR_MSGS['tooltool_manifest_undetermined'])
+ fetch_script_path = os.path.join(dirs['abs_tools_dir'],
+ 'scripts/tooltool/tooltool_wrapper.sh')
+ tooltool_manifest_path = os.path.join(dirs['abs_mozilla_dir'],
+ config['tooltool_manifest_src'])
+ cmd = [
+ 'sh',
+ fetch_script_path,
+ tooltool_manifest_path,
+ config['tooltool_url'],
+ config['tooltool_bootstrap'],
+ ]
+ cmd.extend(config['tooltool_script'])
+ auth_file = self._get_tooltool_auth_file()
+ if auth_file and os.path.exists(auth_file):
+ cmd.extend(['--authentication-file', auth_file])
+ cache = config['bootstrap_env'].get('TOOLTOOL_CACHE')
+ if cache:
+ cmd.extend(['-c', cache])
+ self.info(str(cmd))
+ self.run_command(cmd, cwd=dirs['abs_mozilla_dir'], halt_on_failure=True)
+
+ def funsize_props(self):
+ """Set buildbot properties required to trigger funsize tasks
+ responsible for generating partial updates for successfully generated locales"""
+ locales = self.query_locales()
+ funsize_info = {
+ 'locales': locales,
+ 'branch': self.config['branch'],
+ 'appName': self.config['appName'],
+ 'platform': self.config['platform'],
+ 'completeMarUrls': {locale: self._query_complete_mar_url(locale) for locale in locales},
+ }
+ self.info('funsize info: %s' % funsize_info)
+ self.set_buildbot_property('funsize_info', json.dumps(funsize_info),
+ write_to_file=True)
+
+ def taskcluster_upload(self):
+ auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
+ credentials = {}
+ execfile(auth, credentials)
+ client_id = credentials.get('taskcluster_clientId')
+ access_token = credentials.get('taskcluster_accessToken')
+ if not client_id or not access_token:
+ self.warning('Skipping S3 file upload: No taskcluster credentials.')
+ return
+
+ # We need to activate the virtualenv so that we can import taskcluster
+ # (and its dependent modules, like requests and hawk). Normally we
+ # could create the virtualenv as an action, but query_build_env() is
+ # called from build(), which has to run before the virtualenv can be
+ # created.
+ self.disable_mock()
+ self.create_virtualenv()
+ self.enable_mock()
+ self.activate_virtualenv()
+
+ branch = self.config['branch']
+ revision = self._query_revision()
+ repo = self.query_l10n_repo()
+ if not repo:
+ self.fatal("Unable to determine repository for querying the push info.")
+ pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hg')
+ pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
+
+ routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+ 'testing/mozharness/configs/routes.json')
+ with open(routes_json) as f:
+ contents = json.load(f)
+ templates = contents['l10n']
+
+ # Release promotion creates a special task to accumulate all artifacts
+ # under the same task
+ artifacts_task = None
+ self.read_buildbot_config()
+ if "artifactsTaskId" in self.buildbot_config.get("properties", {}):
+ artifacts_task_id = self.buildbot_config["properties"]["artifactsTaskId"]
+ artifacts_tc = Taskcluster(
+ branch=branch, rank=pushinfo.pushdate, client_id=client_id,
+ access_token=access_token, log_obj=self.log_obj,
+ task_id=artifacts_task_id)
+ artifacts_task = artifacts_tc.get_task(artifacts_task_id)
+ artifacts_tc.claim_task(artifacts_task)
+
+ for locale, files in self.upload_files.iteritems():
+ self.info("Uploading files to S3 for locale '%s': %s" % (locale, files))
+ routes = []
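+ # each entry in routes.json's "l10n" list is a format string, e.g.
+ # (hypothetical):
+ # "{index}.gecko.v2.{project}.revision.{head_rev}.{build_product}-l10n.{build_name}-{build_type}.{locale}"
+ # which is filled in with the values below to build the index routes.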
+ for template in templates:
+ fmt = {
+ 'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
+ 'project': branch,
+ 'head_rev': revision,
+ 'pushdate': pushdate,
+ 'year': pushdate[0:4],
+ 'month': pushdate[4:6],
+ 'day': pushdate[6:8],
+ 'build_product': self.config['stage_product'],
+ 'build_name': self.query_build_name(),
+ 'build_type': self.query_build_type(),
+ 'locale': locale,
+ }
+ fmt.update(self.buildid_to_dict(self._query_buildid()))
+ routes.append(template.format(**fmt))
+
+ self.info('Using routes: %s' % routes)
+ tc = Taskcluster(branch,
+ pushinfo.pushdate, # Use pushdate as the rank
+ client_id,
+ access_token,
+ self.log_obj,
+ )
+ task = tc.create_task(routes)
+ tc.claim_task(task)
+
+ for upload_file in files:
+ # Create an S3 artifact for each file that gets uploaded. We also
+ # check the uploaded file against the property conditions so that we
+ # can set the buildbot config with the correct URLs for package
+ # locations.
+ artifact_url = tc.create_artifact(task, upload_file)
+ if artifacts_task:
+ artifacts_tc.create_reference_artifact(
+ artifacts_task, upload_file, artifact_url)
+
+ tc.report_completed(task)
+
+ if artifacts_task:
+ if not self.query_failed_locales():
+ artifacts_tc.report_completed(artifacts_task)
+ else:
+ # If some locales fail, we want to mark the artifacts
+ # task failed, so a retry can reuse the same task ID
+ artifacts_tc.report_failed(artifacts_task)
+
+
+# main {{{
+if __name__ == '__main__':
+ single_locale = DesktopSingleLocale()
+ single_locale.run_and_exit()
diff --git a/testing/mozharness/scripts/desktop_partner_repacks.py b/testing/mozharness/scripts/desktop_partner_repacks.py
new file mode 100755
index 000000000..ff07dffc8
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_partner_repacks.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_partner_repacks.py
+
+This script manages Desktop partner repacks for beta/release builds.
+"""
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.log import FATAL
+
+
+# DesktopPartnerRepacks {{{1
+class DesktopPartnerRepacks(ReleaseMixin, BuildbotMixin, PurgeMixin,
+ BaseScript, VirtualenvMixin):
+ """Manages desktop partner repacks"""
+ actions = [
+ "clobber",
+ "create-virtualenv",
+ "activate-virtualenv",
+ "setup",
+ "repack",
+ "summary",
+ ]
+ config_options = [
+ [["--version", "-v"], {
+ "dest": "version",
+ "help": "Version of Firefox to repack",
+ }],
+ [["--build-number", "-n"], {
+ "dest": "build_number",
+ "help": "Build number of Firefox to repack",
+ }],
+ [["--platform"], {
+ "dest": "platform",
+ "help": "Platform to repack (e.g. linux64, macosx64, ...)",
+ }],
+ [["--partner", "-p"], {
+ "dest": "partner",
+ "help": "Limit repackaging to partners matching this string",
+ }],
+ [["--s3cfg"], {
+ "dest": "s3cfg",
+ "help": "Configuration file for uploading to S3 using s3cfg",
+ }],
+ [["--hgroot"], {
+ "dest": "hgroot",
+ "help": "Use a different hg server for retrieving files",
+ }],
+ [["--hgrepo"], {
+ "dest": "hgrepo",
+ "help": "Use a different base repo for retrieving files",
+ }],
+ [["--require-buildprops"], {
+ "action": "store_true",
+ "dest": "require_buildprops",
+ "default": False,
+ "help": "Read in config options (like partner) from the buildbot properties file."
+ }],
+ ]
+
+ def __init__(self):
+ # fxbuild style:
+ buildscript_kwargs = {
+ 'all_actions': DesktopPartnerRepacks.actions,
+ 'default_actions': DesktopPartnerRepacks.actions,
+ 'config': {
+ 'buildbot_json_path': 'buildprops.json',
+ "log_name": "partner-repacks",
+ "hashType": "sha512",
+ 'virtualenv_modules': [
+ 'requests==2.2.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.15',
+ 's3cmd==1.6.0',
+ ],
+ 'virtualenv_path': 'venv',
+ 'workdir': 'partner-repacks',
+ },
+ }
+ #
+
+ BaseScript.__init__(
+ self,
+ config_options=self.config_options,
+ **buildscript_kwargs
+ )
+
+
+ def _pre_config_lock(self, rw_config):
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ else:
+ if self.config.get('require_buildprops', False) is True:
+ if not self.buildbot_config:
+ self.fatal("Unable to load properties from file: %s" % self.config.get('buildbot_json_path'))
+ props = self.buildbot_config["properties"]
+ for prop in ['version', 'build_number', 'revision', 'repo_file', 'repack_manifests_url', 'partner']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ if 'version' not in self.config:
+ self.fatal("Version (-v) not supplied.")
+ if 'build_number' not in self.config:
+ self.fatal("Build number (-n) not supplied.")
+ if 'repo_file' not in self.config:
+ self.fatal("repo_file not supplied.")
+ if 'repack_manifests_url' not in self.config:
+ self.fatal("repack_manifests_url not supplied.")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopPartnerRepacks, self).query_abs_dirs()
+ for directory in abs_dirs:
+ value = abs_dirs[directory]
+ abs_dirs[directory] = value
+ dirs = {}
+ dirs['abs_repo_dir'] = os.path.join(abs_dirs['abs_work_dir'], '.repo')
+ dirs['abs_partners_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'partners')
+ dirs['abs_scripts_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'scripts')
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ # Actions {{{
+ def _repo_cleanup(self):
+ self.rmtree(self.query_abs_dirs()['abs_repo_dir'])
+ self.rmtree(self.query_abs_dirs()['abs_partners_dir'])
+ self.rmtree(self.query_abs_dirs()['abs_scripts_dir'])
+
+ def _repo_init(self, repo):
+ status = self.run_command([repo, "init", "--no-repo-verify",
+ "-u", self.config['repack_manifests_url']],
+ cwd=self.query_abs_dirs()['abs_work_dir'])
+ if status:
+ return status
+ return self.run_command([repo, "sync"],
+ cwd=self.query_abs_dirs()['abs_work_dir'])
+
+ def setup(self):
+ """setup step"""
+ repo = self.download_file(self.config['repo_file'],
+ file_name='repo',
+ parent_dir=self.query_abs_dirs()['abs_work_dir'],
+ error_level=FATAL)
+ if not os.path.exists(repo):
+ self.fatal("Unable to download repo tool.")
+ self.chmod(repo, 0755)
+ self.retry(self._repo_init,
+ args=(repo,),
+ error_level=FATAL,
+ cleanup=self._repo_cleanup,  # pass the callable, don't invoke it here
+ good_statuses=[0],
+ sleeptime=5)
+
+ def repack(self):
+ """creates the repacks"""
+ python = self.query_exe("python2.7")
+ repack_cmd = [python, "partner-repacks.py",
+ "-v", self.config['version'],
+ "-n", str(self.config['build_number'])]
+ if self.config.get('platform'):
+ repack_cmd.extend(["--platform", self.config['platform']])
+ if self.config.get('partner'):
+ repack_cmd.extend(["--partner", self.config['partner']])
+ if self.config.get('s3cfg'):
+ repack_cmd.extend(["--s3cfg", self.config['s3cfg']])
+ if self.config.get('hgroot'):
+ repack_cmd.extend(["--hgroot", self.config['hgroot']])
+ if self.config.get('hgrepo'):
+ repack_cmd.extend(["--repo", self.config['hgrepo']])
+ if self.config.get('revision'):
+ repack_cmd.extend(["--tag", self.config["revision"]])
+
+ return self.run_command(repack_cmd,
+ cwd=self.query_abs_dirs()['abs_scripts_dir'])
+
+# main {{{
+if __name__ == '__main__':
+ partner_repacks = DesktopPartnerRepacks()
+ partner_repacks.run_and_exit()
diff --git a/testing/mozharness/scripts/desktop_unittest.py b/testing/mozharness/scripts/desktop_unittest.py
new file mode 100755
index 000000000..b2e754567
--- /dev/null
+++ b/testing/mozharness/scripts/desktop_unittest.py
@@ -0,0 +1,742 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""desktop_unittest.py
+The goal of this is to extract desktop unittesting from buildbot's factory.py
+
+author: Jordan Lund
+"""
+
+import os
+import re
+import sys
+import copy
+import shutil
+import glob
+import imp
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList
+from mozharness.base.log import INFO, ERROR
+from mozharness.base.script import PreScriptAction
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.buildbot import TBPL_EXCEPTION
+from mozharness.mozilla.mozbase import MozbaseMixin
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.mozilla.testing.errors import HarnessErrorList
+from mozharness.mozilla.testing.unittest import DesktopUnittestOutputParser
+from mozharness.mozilla.testing.codecoverage import (
+ CodeCoverageMixin,
+ code_coverage_config_options
+)
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+
+SUITE_CATEGORIES = ['gtest', 'cppunittest', 'jittest', 'mochitest', 'reftest', 'xpcshell', 'mozbase', 'mozmill']
+SUITE_DEFAULT_E10S = ['mochitest', 'reftest']
+
+
+# DesktopUnittest {{{1
+class DesktopUnittest(TestingMixin, MercurialScript, BlobUploadMixin, MozbaseMixin, CodeCoverageMixin):
+ config_options = [
+ [['--mochitest-suite', ], {
+ "action": "extend",
+ "dest": "specified_mochitest_suites",
+ "type": "string",
+ "help": "Specify which mochi suite to run. "
+ "Suites are defined in the config file.\n"
+ "Examples: 'all', 'plain1', 'plain5', 'chrome', or 'a11y'"}
+ ],
+ [['--reftest-suite', ], {
+ "action": "extend",
+ "dest": "specified_reftest_suites",
+ "type": "string",
+ "help": "Specify which reftest suite to run. "
+ "Suites are defined in the config file.\n"
+ "Examples: 'all', 'crashplan', or 'jsreftest'"}
+ ],
+ [['--xpcshell-suite', ], {
+ "action": "extend",
+ "dest": "specified_xpcshell_suites",
+ "type": "string",
+ "help": "Specify which xpcshell suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'xpcshell'"}
+ ],
+ [['--cppunittest-suite', ], {
+ "action": "extend",
+ "dest": "specified_cppunittest_suites",
+ "type": "string",
+ "help": "Specify which cpp unittest suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'cppunittest'"}
+ ],
+ [['--gtest-suite', ], {
+ "action": "extend",
+ "dest": "specified_gtest_suites",
+ "type": "string",
+ "help": "Specify which gtest suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'gtest'"}
+ ],
+ [['--jittest-suite', ], {
+ "action": "extend",
+ "dest": "specified_jittest_suites",
+ "type": "string",
+ "help": "Specify which jit-test suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'jittest'"}
+ ],
+ [['--mozbase-suite', ], {
+ "action": "extend",
+ "dest": "specified_mozbase_suites",
+ "type": "string",
+ "help": "Specify which mozbase suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'mozbase'"}
+ ],
+ [['--mozmill-suite', ], {
+ "action": "extend",
+ "dest": "specified_mozmill_suites",
+ "type": "string",
+ "help": "Specify which mozmill suite to run. "
+ "Suites are defined in the config file\n."
+ "Examples: 'mozmill'"}
+ ],
+ [['--run-all-suites', ], {
+ "action": "store_true",
+ "dest": "run_all_suites",
+ "default": False,
+ "help": "This will run all suites that are specified "
+ "in the config file. You do not need to specify "
+ "any other suites.\nBeware, this may take a while ;)"}
+ ],
+ [['--e10s', ], {
+ "action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run tests with multiple processes."}
+ ],
+ [['--strict-content-sandbox', ], {
+ "action": "store_true",
+ "dest": "strict_content_sandbox",
+ "default": False,
+ "help": "Run tests with a more strict content sandbox (Windows only)."}
+ ],
+ [['--no-random', ], {
+ "action": "store_true",
+ "dest": "no_random",
+ "default": False,
+ "help": "Run tests with no random intermittents and bisect in case of real failure."}
+ ],
+ [["--total-chunks"], {
+ "action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks"}
+ ],
+ [["--this-chunk"], {
+ "action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk"}
+ ],
+ [["--allow-software-gl-layers"], {
+ "action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}
+ ],
+ ] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options) + \
+ copy.deepcopy(code_coverage_config_options)
+
+ def __init__(self, require_config_file=True):
+ # abs_dirs defined already in BaseScript but is here to make pylint happy
+ self.abs_dirs = None
+ super(DesktopUnittest, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'stage-files',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ c = self.config
+ self.global_test_options = []
+ self.installer_url = c.get('installer_url')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.symbols_url = c.get('symbols_url')
+ # this is so mozinstall in install() doesn't bug out if we don't run
+ # the download_and_extract action
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.abs_app_dir = None
+ self.abs_res_dir = None
+
+ # Construct an identifier to be used to identify Perfherder data
+ # for resource monitoring recording. This attempts to uniquely
+ # identify this test invocation configuration.
+ perfherder_parts = []
+ perfherder_options = []
+ suites = (
+ ('specified_mochitest_suites', 'mochitest'),
+ ('specified_reftest_suites', 'reftest'),
+ ('specified_xpcshell_suites', 'xpcshell'),
+ ('specified_cppunittest_suites', 'cppunit'),
+ ('specified_gtest_suites', 'gtest'),
+ ('specified_jittest_suites', 'jittest'),
+ ('specified_mozbase_suites', 'mozbase'),
+ ('specified_mozmill_suites', 'mozmill'),
+ )
+ for s, prefix in suites:
+ if s in c:
+ perfherder_parts.append(prefix)
+ perfherder_parts.extend(c[s])
+
+ if 'this_chunk' in c:
+ perfherder_parts.append(c['this_chunk'])
+
+ if c['e10s']:
+ perfherder_options.append('e10s')
+
+ self.resource_monitor_perfherder_id = ('.'.join(perfherder_parts),
+ perfherder_options)
+
+ # helper methods {{{2
+ def _pre_config_lock(self, rw_config):
+ super(DesktopUnittest, self)._pre_config_lock(rw_config)
+ c = self.config
+ if not c.get('run_all_suites'):
+ return # configs are valid
+ for category in SUITE_CATEGORIES:
+ specific_suites = c.get('specified_%s_suites' % (category))
+ if specific_suites:
+ if specific_suites != 'all':
+ self.fatal("Config options are not valid. Please ensure"
+ " that if the '--run-all-suites' flag was enabled,"
+ " then do not specify to run only specific suites "
+ "like:\n '--mochitest-suite browser-chrome'")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(DesktopUnittest, self).query_abs_dirs()
+
+ c = self.config
+ dirs = {}
+ dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
+ dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_test_extensions_dir'] = os.path.join(dirs['abs_test_install_dir'], 'extensions')
+ dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
+ dirs['abs_test_bin_plugins_dir'] = os.path.join(dirs['abs_test_bin_dir'],
+ 'plugins')
+ dirs['abs_test_bin_components_dir'] = os.path.join(dirs['abs_test_bin_dir'],
+ 'components')
+ dirs['abs_mochitest_dir'] = os.path.join(dirs['abs_test_install_dir'], "mochitest")
+ dirs['abs_reftest_dir'] = os.path.join(dirs['abs_test_install_dir'], "reftest")
+ dirs['abs_xpcshell_dir'] = os.path.join(dirs['abs_test_install_dir'], "xpcshell")
+ dirs['abs_cppunittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "cppunittest")
+ dirs['abs_gtest_dir'] = os.path.join(dirs['abs_test_install_dir'], "gtest")
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+ dirs['abs_jittest_dir'] = os.path.join(dirs['abs_test_install_dir'], "jit-test", "jit-test")
+ dirs['abs_mozbase_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozbase")
+ dirs['abs_mozmill_dir'] = os.path.join(dirs['abs_test_install_dir'], "mozmill")
+
+ if os.path.isabs(c['virtualenv_path']):
+ dirs['abs_virtualenv_dir'] = c['virtualenv_path']
+ else:
+ dirs['abs_virtualenv_dir'] = os.path.join(abs_dirs['abs_work_dir'],
+ c['virtualenv_path'])
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def query_abs_app_dir(self):
+ """We can't set this in advance, because OSX install directories
+ change depending on branding and opt/debug.
+ """
+ if self.abs_app_dir:
+ return self.abs_app_dir
+ if not self.binary_path:
+ self.fatal("Can't determine abs_app_dir (binary_path not set!)")
+ self.abs_app_dir = os.path.dirname(self.binary_path)
+ return self.abs_app_dir
+
+ def query_abs_res_dir(self):
+ """The directory containing resources like plugins and extensions. On
+ OSX this is Contents/Resources; on all other platforms it's the same as
+ the app dir.
+
+ As with the app dir, we can't set this in advance, because OSX install
+ directories change depending on branding and opt/debug.
+ """
+ if self.abs_res_dir:
+ return self.abs_res_dir
+
+ abs_app_dir = self.query_abs_app_dir()
+ if self._is_darwin():
+ res_subdir = self.config.get("mac_res_subdir", "Resources")
+ self.abs_res_dir = os.path.join(os.path.dirname(abs_app_dir), res_subdir)
+ else:
+ self.abs_res_dir = abs_app_dir
+ return self.abs_res_dir
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ self.register_virtualenv_module(name='pip>=1.5')
+ self.register_virtualenv_module('psutil==3.1.1', method='pip')
+ self.register_virtualenv_module(name='mock')
+ self.register_virtualenv_module(name='simplejson')
+
+ requirements_files = [
+ os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')]
+
+ if os.path.isdir(dirs['abs_mochitest_dir']):
+ # mochitest is the only thing that needs this
+ requirements_files.append(
+ os.path.join(dirs['abs_mochitest_dir'],
+ 'websocketprocessbridge',
+ 'websocketprocessbridge_requirements.txt'))
+
+ for requirements_file in requirements_files:
+ self.register_virtualenv_module(requirements=[requirements_file],
+ two_pass=True)
+
+ def _query_symbols_url(self):
+ """query the full symbols URL based upon binary URL"""
+ # may break with name convention changes but is one less 'input' for script
+ if self.symbols_url:
+ return self.symbols_url
+
+ symbols_url = None
+ self.info("finding symbols_url based upon self.installer_url")
+ if self.installer_url:
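+ # derive the symbols package URL from the installer URL, e.g. an installer
+ # ending in ".tar.bz2" maps to a sibling (hypothetical)
+ # "...crashreporter-symbols.zip" file.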
+ for ext in ['.zip', '.dmg', '.tar.bz2']:
+ if ext in self.installer_url:
+ symbols_url = self.installer_url.replace(
+ ext, '.crashreporter-symbols.zip')
+ if not symbols_url:
+ self.fatal("self.installer_url was found but symbols_url could \
+ not be determined")
+ else:
+ self.fatal("self.installer_url was not found in self.config")
+ self.info("setting symbols_url as %s" % (symbols_url))
+ self.symbols_url = symbols_url
+ return self.symbols_url
+
+ def _query_abs_base_cmd(self, suite_category, suite):
+ if self.binary_path:
+ c = self.config
+ dirs = self.query_abs_dirs()
+ run_file = c['run_file_names'][suite_category]
+ base_cmd = [self.query_python_path('python'), '-u']
+ base_cmd.append(os.path.join(dirs["abs_%s_dir" % suite_category], run_file))
+ abs_app_dir = self.query_abs_app_dir()
+ abs_res_dir = self.query_abs_res_dir()
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_raw.log' % suite)
+
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ '%s_errorsummary.log' % suite)
+ str_format_values = {
+ 'binary_path': self.binary_path,
+ 'symbols_path': self._query_symbols_url(),
+ 'abs_app_dir': abs_app_dir,
+ 'abs_res_dir': abs_res_dir,
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'gtest_dir': os.path.join(dirs['abs_test_install_dir'],
+ 'gtest'),
+ }
+
+ # TestingMixin._download_and_extract_symbols() will set
+ # self.symbols_path when downloading/extracting.
+ if self.symbols_path:
+ str_format_values['symbols_path'] = self.symbols_path
+
+ if suite_category in SUITE_DEFAULT_E10S and not c['e10s']:
+ base_cmd.append('--disable-e10s')
+ elif suite_category not in SUITE_DEFAULT_E10S and c['e10s']:
+ base_cmd.append('--e10s')
+
+ if c.get('strict_content_sandbox'):
+ if suite_category == "mochitest":
+ base_cmd.append('--strict-content-sandbox')
+ else:
+ self.fatal("--strict-content-sandbox only works with mochitest suites.")
+
+ if c.get('total_chunks') and c.get('this_chunk'):
+ base_cmd.extend(['--total-chunks', c['total_chunks'],
+ '--this-chunk', c['this_chunk']])
+
+ if c['no_random']:
+ if suite_category == "mochitest":
+ base_cmd.append('--bisect-chunk=default')
+ else:
+ self.warning("--no-random does not currently work with suites other than mochitest.")
+
+ # set pluginsPath
+ abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
+ str_format_values['test_plugin_path'] = abs_res_plugins_dir
+
+ if suite_category not in c["suite_definitions"]:
+ self.fatal("'%s' not defined in the config!")
+
+ if suite in ('browser-chrome-coverage', 'xpcshell-coverage', 'mochitest-devtools-chrome-coverage'):
+ base_cmd.append('--jscov-dir-prefix=%s' %
+ dirs['abs_blob_upload_dir'])
+
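+ # suite_definitions options are plain format strings, e.g. (hypothetical)
+ # "--appname=%(binary_path)s" or "--symbols-path=%(symbols_path)s",
+ # interpolated with str_format_values below.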
+ options = c["suite_definitions"][suite_category]["options"]
+ if options:
+ for option in options:
+ option = option % str_format_values
+ if not option.endswith('None'):
+ base_cmd.append(option)
+ if self.structured_output(
+ suite_category,
+ self._query_try_flavor(suite_category, suite)
+ ):
+ base_cmd.append("--log-raw=-")
+ return base_cmd
+ else:
+ self.warning("Suite options for %s could not be determined."
+ "\nIf you meant to have options for this suite, "
+ "please make sure they are specified in your "
+ "config under %s_options" %
+ (suite_category, suite_category))
+
+ return base_cmd
+ else:
+ self.fatal("'binary_path' could not be determined.\n This should "
+ "be like '/path/build/application/firefox/firefox'"
+ "\nIf you are running this script without the 'install' "
+ "action (where binary_path is set), please ensure you are"
+ " either:\n(1) specifying it in the config file under "
+ "binary_path\n(2) specifying it on command line with the"
+ " '--binary-path' flag")
+
+ def _query_specified_suites(self, category):
+ # logic goes: if at least one '--{category}-suite' was given,
+ # then run only the given suite(s). Otherwise, if no suites were
+ # specified and the --run-all-suites flag was given,
+ # run all {category} suites. In any other case, run no suites.
+ c = self.config
+ all_suites = c.get('all_%s_suites' % (category))
+ specified_suites = c.get('specified_%s_suites' % (category)) # list
+ suites = None
+
+ if specified_suites:
+ if 'all' in specified_suites:
+ # useful if you want a quick way of saying run all suites
+ # of a specific category.
+ suites = all_suites
+ else:
+ # suites gets a dict of everything from all_suites where a key
+ # is also in specified_suites
+ suites = dict((key, all_suites.get(key)) for key in
+ specified_suites if key in all_suites.keys())
+ else:
+ if c.get('run_all_suites'): # needed if you dont specify any suites
+ suites = all_suites
+
+ return suites
+
+ def _query_try_flavor(self, category, suite):
+ flavors = {
+ "mochitest": [("plain.*", "mochitest"),
+ ("browser-chrome.*", "browser-chrome"),
+ ("mochitest-devtools-chrome.*", "devtools-chrome"),
+ ("chrome", "chrome"),
+ ("jetpack.*", "jetpack")],
+ "xpcshell": [("xpcshell", "xpcshell")],
+ "reftest": [("reftest", "reftest"),
+ ("crashtest", "crashtest")]
+ }
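+ # the first matching pattern wins, e.g. a suite named "browser-chrome-e10s"
+ # (illustrative) maps to the "browser-chrome" flavor.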
+ for suite_pattern, flavor in flavors.get(category, []):
+ if re.compile(suite_pattern).match(suite):
+ return flavor
+
+ def structured_output(self, suite_category, flavor=None):
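+ # returns True when the suite should use structured (raw) log output; a
+ # category/flavor listed in config['unstructured_flavors'] keeps the legacy
+ # unstructured parser.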
+ unstructured_flavors = self.config.get('unstructured_flavors')
+ if not unstructured_flavors:
+ return False
+ if suite_category not in unstructured_flavors:
+ return True
+ if not unstructured_flavors.get(suite_category) or flavor in unstructured_flavors.get(suite_category):
+ return False
+ return True
+
+ def get_test_output_parser(self, suite_category, flavor=None, strict=False,
+ **kwargs):
+ if not self.structured_output(suite_category, flavor):
+ return DesktopUnittestOutputParser(suite_category=suite_category, **kwargs)
+ self.info("Structured output parser in use for %s." % suite_category)
+ return StructuredOutputParser(suite_category=suite_category, strict=strict, **kwargs)
+
+ # Actions {{{2
+
+ # clobber defined in BaseScript, deletes mozharness/build if exists
+ # read_buildbot_config is in BuildbotMixin.
+ # postflight_read_buildbot_config is in TestingMixin.
+ # preflight_download_and_extract is in TestingMixin.
+ # create_virtualenv is in VirtualenvMixin.
+ # preflight_install is in TestingMixin.
+ # install is in TestingMixin.
+ # upload_blobber_files is in BlobUploadMixin
+
+ @PreScriptAction('download-and-extract')
+ def _pre_download_and_extract(self, action):
+ """Abort if --artifact try syntax is used with compiled-code tests"""
+ if not self.try_message_has_flag('artifact'):
+ return
+ self.info('Artifact build requested in try syntax.')
+ rejected = []
+ compiled_code_suites = [
+ "cppunit",
+ "gtest",
+ "jittest",
+ ]
+ for category in SUITE_CATEGORIES:
+ suites = self._query_specified_suites(category) or []
+ for suite in suites:
+ if any([suite.startswith(c) for c in compiled_code_suites]):
+ rejected.append(suite)
+ break
+ if rejected:
+ self.buildbot_status(TBPL_EXCEPTION)
+ self.fatal("There are specified suites that are incompatible with "
+ "--artifact try syntax flag: {}".format(', '.join(rejected)),
+ exit_code=self.return_code)
+
+
+ def download_and_extract(self):
+ """
+ download and extract the test zip / download the installer;
+ optimize which subfolders to extract from the tests zip
+ """
+ c = self.config
+
+ extract_dirs = None
+ if c['specific_tests_zip_dirs']:
+ extract_dirs = list(c['minimum_tests_zip_dirs'])
+ for category in c['specific_tests_zip_dirs'].keys():
+ if c['run_all_suites'] or self._query_specified_suites(category) \
+ or 'run-tests' not in self.actions:
+ extract_dirs.extend(c['specific_tests_zip_dirs'][category])
+
+ if c.get('run_all_suites'):
+ target_categories = SUITE_CATEGORIES
+ else:
+ target_categories = [cat for cat in SUITE_CATEGORIES
+ if self._query_specified_suites(cat) is not None]
+ super(DesktopUnittest, self).download_and_extract(extract_dirs=extract_dirs,
+ suite_categories=target_categories)
+
+ def stage_files(self):
+ for category in SUITE_CATEGORIES:
+ suites = self._query_specified_suites(category)
+ stage = getattr(self, '_stage_{}'.format(category), None)
+ if suites and stage:
+ stage(suites)
+
+ def _stage_files(self, bin_name=None):
+ dirs = self.query_abs_dirs()
+ abs_app_dir = self.query_abs_app_dir()
+
+ # For mac these directories are in Contents/Resources, on other
+ # platforms abs_res_dir will point to abs_app_dir.
+ abs_res_dir = self.query_abs_res_dir()
+ abs_res_components_dir = os.path.join(abs_res_dir, 'components')
+ abs_res_plugins_dir = os.path.join(abs_res_dir, 'plugins')
+ abs_res_extensions_dir = os.path.join(abs_res_dir, 'extensions')
+
+ if bin_name:
+ self.info('copying %s to %s' % (os.path.join(dirs['abs_test_bin_dir'],
+ bin_name), os.path.join(abs_app_dir, bin_name)))
+ shutil.copy2(os.path.join(dirs['abs_test_bin_dir'], bin_name),
+ os.path.join(abs_app_dir, bin_name))
+
+ self.copytree(dirs['abs_test_bin_components_dir'],
+ abs_res_components_dir,
+ overwrite='overwrite_if_exists')
+ self.mkdir_p(abs_res_plugins_dir)
+ self.copytree(dirs['abs_test_bin_plugins_dir'],
+ abs_res_plugins_dir,
+ overwrite='overwrite_if_exists')
+ if os.path.isdir(dirs['abs_test_extensions_dir']):
+ self.mkdir_p(abs_res_extensions_dir)
+ self.copytree(dirs['abs_test_extensions_dir'],
+ abs_res_extensions_dir,
+ overwrite='overwrite_if_exists')
+
+ def _stage_xpcshell(self, suites):
+ self._stage_files(self.config['xpcshell_name'])
+
+ def _stage_cppunittest(self, suites):
+ abs_res_dir = self.query_abs_res_dir()
+ dirs = self.query_abs_dirs()
+ abs_cppunittest_dir = dirs['abs_cppunittest_dir']
+
+ # move manifest and js files to resources dir, where tests expect them
+ files = glob.glob(os.path.join(abs_cppunittest_dir, '*.js'))
+ files.extend(glob.glob(os.path.join(abs_cppunittest_dir, '*.manifest')))
+ for f in files:
+ self.move(f, abs_res_dir)
+
+ def _stage_gtest(self, suites):
+ abs_res_dir = self.query_abs_res_dir()
+ abs_app_dir = self.query_abs_app_dir()
+ dirs = self.query_abs_dirs()
+ abs_gtest_dir = dirs['abs_gtest_dir']
+ dirs['abs_test_bin_dir'] = os.path.join(dirs['abs_test_install_dir'], 'bin')
+
+ files = glob.glob(os.path.join(dirs['abs_test_bin_plugins_dir'], 'gmp-*'))
+ files.append(os.path.join(abs_gtest_dir, 'dependentlibs.list.gtest'))
+ for f in files:
+ self.move(f, abs_res_dir)
+
+ self.copytree(os.path.join(abs_gtest_dir, 'gtest_bin'),
+ os.path.join(abs_app_dir))
+
+ def _stage_mozmill(self, suites):
+ self._stage_files()
+ dirs = self.query_abs_dirs()
+ modules = ['jsbridge', 'mozmill']
+ for module in modules:
+ self.install_module(module=os.path.join(dirs['abs_mozmill_dir'],
+ 'resources',
+ module))
+
+ # pull defined in VCSScript.
+ # preflight_run_tests defined in TestingMixin.
+
+ def run_tests(self):
+ for category in SUITE_CATEGORIES:
+ self._run_category_suites(category)
+
+ def get_timeout_for_category(self, suite_category):
+ if suite_category == 'cppunittest':
+ return 2500
+ return self.config["suite_definitions"][suite_category].get('run_timeout', 1000)
+
+ def _run_category_suites(self, suite_category):
+ """run suite(s) to a specific category"""
+ dirs = self.query_abs_dirs()
+ suites = self._query_specified_suites(suite_category)
+ abs_app_dir = self.query_abs_app_dir()
+ abs_res_dir = self.query_abs_res_dir()
+
+ if suites:
+ self.info('#### Running %s suites' % suite_category)
+ for suite in suites:
+ abs_base_cmd = self._query_abs_base_cmd(suite_category, suite)
+ cmd = abs_base_cmd[:]
+ replace_dict = {
+ 'abs_app_dir': abs_app_dir,
+
+ # Mac specific, but points to abs_app_dir on other
+ # platforms.
+ 'abs_res_dir': abs_res_dir,
+ }
+ options_list = []
+ env = {}
+ if isinstance(suites[suite], dict):
+ options_list = suites[suite].get('options', [])
+ tests_list = suites[suite].get('tests', [])
+ env = copy.deepcopy(suites[suite].get('env', {}))
+ else:
+ options_list = suites[suite]
+ tests_list = []
+
+ flavor = self._query_try_flavor(suite_category, suite)
+ try_options, try_tests = self.try_args(flavor)
+
+ cmd.extend(self.query_options(options_list,
+ try_options,
+ str_format_values=replace_dict))
+ cmd.extend(self.query_tests_args(tests_list,
+ try_tests,
+ str_format_values=replace_dict))
+
+ suite_name = suite_category + '-' + suite
+ tbpl_status, log_level = None, None
+ error_list = BaseErrorList + HarnessErrorList
+ parser = self.get_test_output_parser(suite_category,
+ flavor=flavor,
+ config=self.config,
+ error_list=error_list,
+ log_obj=self.log_obj)
+
+ if suite_category == "reftest":
+ ref_formatter = imp.load_source(
+ "ReftestFormatter",
+ os.path.abspath(
+ os.path.join(dirs["abs_reftest_dir"], "output.py")))
+ parser.formatter = ref_formatter.ReftestFormatter()
+
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ if self.query_nodejs():
+ env['MOZ_NODE_PATH'] = self.nodejs_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
+ self.mkdir_p(env['MOZ_UPLOAD_DIR'])
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ env = self.query_env(partial_env=env, log_level=INFO)
+ cmd_timeout = self.get_timeout_for_category(suite_category)
+ return_code = self.run_command(cmd, cwd=dirs['abs_work_dir'],
+ output_timeout=cmd_timeout,
+ output_parser=parser,
+ env=env)
+
+ # mochitest, reftest, and xpcshell suites do not return
+ # appropriate return codes. Therefore, we must parse the output
+ # to determine what the tbpl_status and worst_log_level must
+ # be. We do this by:
+ # 1) checking to see if our mozharness script ran into any
+ # errors itself with 'num_errors' <- OutputParser
+ # 2) if num_errors is 0 then we look in the subclassed 'parser'
+ # findings for harness/suite errors <- DesktopUnittestOutputParser
+ # 3) checking to see if the return code is in success_codes
+
+ success_codes = None
+ if self._is_windows() and suite_category != 'gtest':
+ # bug 1120644
+ success_codes = [0, 1]
+
+ tbpl_status, log_level = parser.evaluate_parser(return_code,
+ success_codes=success_codes)
+ parser.append_tinderboxprint_line(suite_name)
+
+ self.buildbot_status(tbpl_status, level=log_level)
+ self.log("The %s suite: %s ran with return status: %s" %
+ (suite_category, suite, tbpl_status), level=log_level)
+ else:
+ self.debug('There were no suites to run for %s' % suite_category)
+
+
+# main {{{1
+if __name__ == '__main__':
+ desktop_unittest = DesktopUnittest()
+ desktop_unittest.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_media_tests_buildbot.py b/testing/mozharness/scripts/firefox_media_tests_buildbot.py
new file mode 100644
index 000000000..17b830f0f
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_buildbot.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** BEGIN LICENSE BLOCK *****
+"""firefox_media_tests_buildbot.py
+
+Author: Maja Frydrychowicz
+"""
+import copy
+import glob
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import DEBUG, ERROR, INFO
+from mozharness.base.script import PostScriptAction
+from mozharness.mozilla.blob_upload import (
+ BlobUploadMixin,
+ blobupload_config_options
+)
+from mozharness.mozilla.buildbot import (
+ TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE
+)
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase, TESTFAILED, SUCCESS
+)
+
+
+class FirefoxMediaTestsBuildbot(FirefoxMediaTestsBase, BlobUploadMixin):
+
+ def __init__(self):
+ config_options = copy.deepcopy(blobupload_config_options)
+ super(FirefoxMediaTestsBuildbot, self).__init__(
+ config_options=config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def run_media_tests(self):
+ status = super(FirefoxMediaTestsBuildbot, self).run_media_tests()
+ if status == SUCCESS:
+ tbpl_status = TBPL_SUCCESS
+ else:
+ tbpl_status = TBPL_FAILURE
+ if status == TESTFAILED:
+ tbpl_status = TBPL_WARNING
+ self.buildbot_status(tbpl_status)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(FirefoxMediaTestsBuildbot, self).query_abs_dirs()
+ dirs = {
+ 'abs_blob_upload_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'blobber_upload_dir')
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsBuildbot, self)._query_cmd()
+ dirs = self.query_abs_dirs()
+ # configure logging
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ cmd += ['--gecko-log', os.path.join(blob_upload_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(blob_upload_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(blob_upload_dir, 'media_tests_mach.log')]
+ return cmd
+
+ @PostScriptAction('run-media-tests')
+ def _collect_uploads(self, action, success=None):
+ """ Copy extra (log) files to blob upload dir. """
+ dirs = self.query_abs_dirs()
+ log_dir = dirs.get('abs_log_dir')
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ if not log_dir or not blob_upload_dir:
+ return
+ self.mkdir_p(blob_upload_dir)
+ # Move firefox-media-test screenshots into log_dir
+ screenshots_dir = os.path.join(dirs['base_work_dir'],
+ 'screenshots')
+ log_screenshots_dir = os.path.join(log_dir, 'screenshots')
+ if os.access(log_screenshots_dir, os.F_OK):
+ self.rmtree(log_screenshots_dir)
+ if os.access(screenshots_dir, os.F_OK):
+ self.move(screenshots_dir, log_screenshots_dir)
+
+ # logs to upload: broadest level (info), error, screenshots
+ uploads = glob.glob(os.path.join(log_screenshots_dir, '*'))
+ log_files = self.log_obj.log_files
+ log_level = self.log_obj.log_level
+
+ def append_path(filename, dir=log_dir):
+ if filename:
+ uploads.append(os.path.join(dir, filename))
+
+ append_path(log_files.get(ERROR))
+ # never upload debug logs
+ if log_level == DEBUG:
+ append_path(log_files.get(INFO))
+ else:
+ append_path(log_files.get(log_level))
+ # in case of SimpleFileLogger
+ append_path(log_files.get('default'))
+ for f in uploads:
+ if os.access(f, os.F_OK):
+ dest = os.path.join(blob_upload_dir, os.path.basename(f))
+ self.copyfile(f, dest)
+
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsBuildbot()
+ media_test.run_and_exit()
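
The run_media_tests() override above maps the harness result onto a buildbot status. The following standalone sketch restates that mapping with placeholder values for the SUCCESS/TESTFAILED constants (the real values come from mozharness.mozilla.testing.firefox_media_tests):

    SUCCESS, TESTFAILED = 'success', 'testfailed'   # placeholders, not the real constants
    TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE = 'success', 'warning', 'failure'

    def media_status_to_tbpl(status):
        if status == SUCCESS:
            return TBPL_SUCCESS
        if status == TESTFAILED:    # test failures are reported as a warning, not a failure
            return TBPL_WARNING
        return TBPL_FAILURE         # crashes and harness errors become failures

    print(media_status_to_tbpl(TESTFAILED))  # -> warning
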
diff --git a/testing/mozharness/scripts/firefox_media_tests_jenkins.py b/testing/mozharness/scripts/firefox_media_tests_jenkins.py
new file mode 100755
index 000000000..e35655257
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_jenkins.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""firefox_media_tests_jenkins.py
+
+Author: Syd Polk
+"""
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase
+)
+
+
+class FirefoxMediaTestsJenkins(FirefoxMediaTestsBase):
+
+ def __init__(self):
+ super(FirefoxMediaTestsJenkins, self).__init__(
+ all_actions=['clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsJenkins, self)._query_cmd()
+
+ dirs = self.query_abs_dirs()
+
+ # configure logging
+ log_dir = dirs.get('abs_log_dir')
+ cmd += ['--gecko-log', os.path.join(log_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(log_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(log_dir, 'media_tests_mach.log')]
+
+ return cmd
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsJenkins()
+ media_test.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_media_tests_taskcluster.py b/testing/mozharness/scripts/firefox_media_tests_taskcluster.py
new file mode 100644
index 000000000..7a0121dca
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_media_tests_taskcluster.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""firefox_media_tests_taskcluster.py
+
+Adapted from firefox_media_tests_buildbot.py
+
+Author: Bryce Van Dyk
+"""
+import copy
+import glob
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import DEBUG, ERROR, INFO
+from mozharness.base.script import PostScriptAction
+from mozharness.mozilla.blob_upload import (
+ BlobUploadMixin,
+ blobupload_config_options
+)
+from mozharness.mozilla.testing.firefox_media_tests import (
+ FirefoxMediaTestsBase, TESTFAILED, SUCCESS
+)
+
+
+class FirefoxMediaTestsTaskcluster(FirefoxMediaTestsBase):
+
+ def __init__(self):
+ config_options = copy.deepcopy(blobupload_config_options)
+ super(FirefoxMediaTestsTaskcluster, self).__init__(
+ config_options=config_options,
+ all_actions=['clobber',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-media-tests',
+ ],
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(FirefoxMediaTestsTaskcluster, self).query_abs_dirs()
+ dirs = {
+ 'abs_blob_upload_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'blobber_upload_dir')
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def _query_cmd(self):
+ cmd = super(FirefoxMediaTestsTaskcluster, self)._query_cmd()
+ dirs = self.query_abs_dirs()
+ # configure logging
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ cmd += ['--gecko-log', os.path.join(blob_upload_dir, 'gecko.log')]
+ cmd += ['--log-html', os.path.join(blob_upload_dir, 'media_tests.html')]
+ cmd += ['--log-mach', os.path.join(blob_upload_dir, 'media_tests_mach.log')]
+ return cmd
+
+ @PostScriptAction('run-media-tests')
+ def _collect_uploads(self, action, success=None):
+ """ Copy extra (log) files to blob upload dir. """
+ dirs = self.query_abs_dirs()
+ log_dir = dirs.get('abs_log_dir')
+ blob_upload_dir = dirs.get('abs_blob_upload_dir')
+ if not log_dir or not blob_upload_dir:
+ return
+ self.mkdir_p(blob_upload_dir)
+ # Move firefox-media-test screenshots into log_dir
+ screenshots_dir = os.path.join(dirs['base_work_dir'],
+ 'screenshots')
+ log_screenshots_dir = os.path.join(log_dir, 'screenshots')
+ if os.access(log_screenshots_dir, os.F_OK):
+ self.rmtree(log_screenshots_dir)
+ if os.access(screenshots_dir, os.F_OK):
+ self.move(screenshots_dir, log_screenshots_dir)
+
+ # logs to upload: broadest level (info), error, screenshots
+ uploads = glob.glob(os.path.join(log_screenshots_dir, '*'))
+ log_files = self.log_obj.log_files
+ log_level = self.log_obj.log_level
+
+ def append_path(filename, dir=log_dir):
+ if filename:
+ uploads.append(os.path.join(dir, filename))
+
+ append_path(log_files.get(ERROR))
+ # never upload debug logs
+ if log_level == DEBUG:
+ append_path(log_files.get(INFO))
+ else:
+ append_path(log_files.get(log_level))
+ # in case of SimpleFileLogger
+ append_path(log_files.get('default'))
+ for f in uploads:
+ if os.access(f, os.F_OK):
+ dest = os.path.join(blob_upload_dir, os.path.basename(f))
+ self.copyfile(f, dest)
+
+
+if __name__ == '__main__':
+ media_test = FirefoxMediaTestsTaskcluster()
+ media_test.run_and_exit()
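
Both media-test scripts above select which log files to copy into the blob upload directory with the same rule: always take the error log, never the debug log (falling back to the info log), and include the SimpleFileLogger default log if present. A small sketch with invented file names:

    DEBUG, INFO, ERROR = 'debug', 'info', 'error'

    def logs_to_upload(log_files, log_level):
        uploads = []
        def append(name):
            if name:
                uploads.append(name)
        append(log_files.get(ERROR))
        # never upload debug logs; fall back to the info log instead
        append(log_files.get(INFO) if log_level == DEBUG else log_files.get(log_level))
        append(log_files.get('default'))  # present when SimpleFileLogger is in use
        return uploads

    print(logs_to_upload({'error': 'err.log', 'debug': 'dbg.log', 'info': 'info.log'}, DEBUG))
    # -> ['err.log', 'info.log']
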
diff --git a/testing/mozharness/scripts/firefox_ui_tests/functional.py b/testing/mozharness/scripts/firefox_ui_tests/functional.py
new file mode 100755
index 000000000..58048ad33
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/functional.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.mozilla.testing.firefox_ui_tests import FirefoxUIFunctionalTests
+
+
+if __name__ == '__main__':
+ myScript = FirefoxUIFunctionalTests()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_ui_tests/update.py b/testing/mozharness/scripts/firefox_ui_tests/update.py
new file mode 100755
index 000000000..c8f5842b7
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/update.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.mozilla.testing.firefox_ui_tests import FirefoxUIUpdateTests
+
+
+if __name__ == '__main__':
+ myScript = FirefoxUIUpdateTests()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/firefox_ui_tests/update_release.py b/testing/mozharness/scripts/firefox_ui_tests/update_release.py
new file mode 100755
index 000000000..f1ec81646
--- /dev/null
+++ b/testing/mozharness/scripts/firefox_ui_tests/update_release.py
@@ -0,0 +1,323 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+
+import copy
+import os
+import pprint
+import sys
+import urllib
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import PreScriptAction
+from mozharness.mozilla.buildbot import TBPL_SUCCESS, TBPL_WARNING, EXIT_STATUS_DICT
+from mozharness.mozilla.testing.firefox_ui_tests import (
+ FirefoxUIUpdateTests,
+ firefox_ui_update_config_options
+)
+
+
+# Command line arguments for release update tests
+firefox_ui_update_release_config_options = [
+ [['--build-number'], {
+ 'dest': 'build_number',
+ 'help': 'Build number of the release, e.g. 2',
+ }],
+ [['--limit-locales'], {
+ 'dest': 'limit_locales',
+ 'default': -1,
+ 'type': int,
+ 'help': 'Limit the number of locales to run.',
+ }],
+ [['--release-update-config'], {
+ 'dest': 'release_update_config',
+ 'help': 'Name of the release update verification config file to use.',
+ }],
+ [['--this-chunk'], {
+ 'dest': 'this_chunk',
+ 'default': 1,
+ 'help': 'What chunk of locales to process.',
+ }],
+ [['--tools-repo'], {
+ 'dest': 'tools_repo',
+ 'default': 'http://hg.mozilla.org/build/tools',
+ 'help': 'Which tools repo to check out',
+ }],
+ [['--tools-tag'], {
+ 'dest': 'tools_tag',
+ 'help': 'Which revision/tag to use for the tools repository.',
+ }],
+ [['--total-chunks'], {
+ 'dest': 'total_chunks',
+ 'default': 1,
+ 'help': 'Total number of chunks to divide the locales into.',
+ }],
+] + copy.deepcopy(firefox_ui_update_config_options)
+
+
+class ReleaseFirefoxUIUpdateTests(FirefoxUIUpdateTests):
+
+ def __init__(self):
+ all_actions = [
+ 'clobber',
+ 'checkout',
+ 'create-virtualenv',
+ 'query_minidump_stackwalk',
+ 'read-release-update-config',
+ 'run-tests',
+ ]
+
+ super(ReleaseFirefoxUIUpdateTests, self).__init__(
+ all_actions=all_actions,
+ default_actions=all_actions,
+ config_options=firefox_ui_update_release_config_options,
+ append_env_variables_from_configs=True,
+ )
+
+ self.tools_repo = self.config.get('tools_repo')
+ self.tools_tag = self.config.get('tools_tag')
+
+ assert self.tools_repo and self.tools_tag, \
+ 'Without the "--tools-tag" we can\'t clone the releng\'s tools repository.'
+
+ self.limit_locales = int(self.config.get('limit_locales'))
+
+ # This will be a list containing one item per release based on configs
+ # from tools/release/updates/*cfg
+ self.releases = None
+
+ def checkout(self):
+ """
+ Check out the tools repository and update it to the right branch/tag.
+ """
+ dirs = self.query_abs_dirs()
+
+ super(ReleaseFirefoxUIUpdateTests, self).checkout()
+
+ self.vcs_checkout(
+ repo=self.tools_repo,
+ dest=dirs['abs_tools_dir'],
+ branch=self.tools_tag,
+ vcs='hg'
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+
+ abs_dirs = super(ReleaseFirefoxUIUpdateTests, self).query_abs_dirs()
+ dirs = {
+ 'abs_tools_dir': os.path.join(abs_dirs['abs_work_dir'], 'tools'),
+ }
+
+ for key in dirs:
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def read_release_update_config(self):
+ '''
+ Builds a testing matrix based on an update verification configuration
+ file under the tools repository (release/updates/*.cfg).
+
+ Each release info line of the update verification files looks similar to the following.
+
+ NOTE: This shows each key/value pair on its own line, but in reality they are
+ separated by a single space. We only show the values we care about.
+
+ release="38.0"
+ platform="Linux_x86_64-gcc3"
+ build_id="20150429135941"
+ locales="ach af ... zh-TW"
+ channel="beta-localtest"
+ from="/firefox/releases/38.0b9/linux-x86_64/%locale%/firefox-38.0b9.tar.bz2"
+ ftp_server_from="http://archive.mozilla.org/pub"
+
+ We will store this information in self.releases as a list of releases.
+
+ NOTE: We distinguish between full and quick releases. Full release info normally
+ contains only a subset of all locales (except for the most recent releases). A quick
+ release has all locales, but is missing the 'from' and 'ftp_server_from' fields.
+ The two kinds of entries complement each other.
+ '''
+ dirs = self.query_abs_dirs()
+ assert os.path.exists(dirs['abs_tools_dir']), \
+ 'Without the tools/ checkout we can\'t use releng\'s config parser.'
+
+ if self.config.get('release_update_config'):
+ # The config file is part of the tools repository. If it is specified,
+ # make sure a revision/tag of that repository is set as well.
+ if self.tools_tag is None:
+ self.fatal('Make sure to specify the --tools-tag')
+
+ self.release_update_config = self.config['release_update_config']
+
+ # Import the config parser
+ sys.path.insert(1, os.path.join(dirs['abs_tools_dir'], 'lib', 'python'))
+ from release.updates.verify import UpdateVerifyConfig
+
+ uvc = UpdateVerifyConfig()
+ config_file = os.path.join(dirs['abs_tools_dir'], 'release', 'updates',
+ self.config['release_update_config'])
+ uvc.read(config_file)
+ if not hasattr(self, 'update_channel'):
+ self.update_channel = uvc.channel
+
+ # Filter out any releases that are less than Gecko 38
+ uvc.releases = [r for r in uvc.releases
+ if int(r['release'].split('.')[0]) >= 38]
+
+ temp_releases = []
+ for rel_info in uvc.releases:
+ # This is the full release info
+ if 'from' in rel_info and rel_info['from'] is not None:
+ # Find the associated quick release, which contains the remaining locales
+ # (all releases except the most recent one split their locales this way)
+ quick_release = uvc.getRelease(build_id=rel_info['build_id'], from_path=None)
+ if quick_release != {}:
+ rel_info['locales'] = sorted(rel_info['locales'] + quick_release['locales'])
+ temp_releases.append(rel_info)
+
+ uvc.releases = temp_releases
+ chunked_config = uvc.getChunk(
+ chunks=int(self.config['total_chunks']),
+ thisChunk=int(self.config['this_chunk'])
+ )
+
+ self.releases = chunked_config.releases
+
+ @PreScriptAction('run-tests')
+ def _pre_run_tests(self, action):
+ assert ('release_update_config' in self.config or
+ self.installer_url or self.installer_path), \
+ 'Either specify --release-update-config, --installer-url or --installer-path.'
+
+ def run_tests(self):
+ dirs = self.query_abs_dirs()
+
+ # We don't want multiple outputs of the same environment information. To prevent
+ # that, we can't make it an argument of run_command and have to print it on our own.
+ self.info('Using env: {}'.format(pprint.pformat(self.query_env())))
+
+ results = {}
+
+ locales_counter = 0
+ for rel_info in sorted(self.releases, key=lambda release: release['build_id']):
+ build_id = rel_info['build_id']
+ results[build_id] = {}
+
+ self.info('About to run {buildid} {path} - {num_locales} locales'.format(
+ buildid=build_id,
+ path=rel_info['from'],
+ num_locales=len(rel_info['locales'])
+ ))
+
+ # Each locale gets a fresh port to avoid address-in-use errors when
+ # tests time out unexpectedly.
+ marionette_port = 2827
+ for locale in rel_info['locales']:
+ locales_counter += 1
+ self.info('Running {buildid} {locale}'.format(buildid=build_id,
+ locale=locale))
+
+ if self.limit_locales > -1 and locales_counter > self.limit_locales:
+ self.info('We have reached the limit of locales we were intending to run')
+ break
+
+ if self.config['dry_run']:
+ continue
+
+ # Determine from where to download the file
+ installer_url = '{server}/{fragment}'.format(
+ server=rel_info['ftp_server_from'],
+ fragment=urllib.quote(rel_info['from'].replace('%locale%', locale))
+ )
+ installer_path = self.download_file(
+ url=installer_url,
+ parent_dir=dirs['abs_work_dir']
+ )
+
+ binary_path = self.install_app(app=self.config.get('application'),
+ installer_path=installer_path)
+
+ marionette_port += 1
+
+ retcode = self.run_test(
+ binary_path=binary_path,
+ env=self.query_env(avoid_host_env=True),
+ marionette_port=marionette_port,
+ )
+
+ self.uninstall_app()
+
+ # Remove installer which is not needed anymore
+ self.info('Removing {}'.format(installer_path))
+ os.remove(installer_path)
+
+ if retcode:
+ self.warning('FAIL: {} has failed.'.format(sys.argv[0]))
+
+ base_cmd = 'python {command} --firefox-ui-branch {branch} ' \
+ '--release-update-config {config} --tools-tag {tag}'.format(
+ command=sys.argv[0],
+ branch=self.firefox_ui_branch,
+ config=self.release_update_config,
+ tag=self.tools_tag
+ )
+
+ for config in self.config['config_files']:
+ base_cmd += ' --cfg {}'.format(config)
+
+ if self.symbols_url:
+ base_cmd += ' --symbols-path {}'.format(self.symbols_url)
+
+ base_cmd += ' --installer-url {}'.format(installer_url)
+
+ self.info('You can run the *specific* locale on the same machine with:')
+ self.info(base_cmd)
+
+ self.info('You can run the *specific* locale on *your* machine with:')
+ self.info('{} --cfg developer_config.py'.format(base_cmd))
+
+ results[build_id][locale] = retcode
+
+ self.info('Completed {buildid} {locale} with return code: {retcode}'.format(
+ buildid=build_id,
+ locale=locale,
+ retcode=retcode))
+
+ if self.limit_locales > -1 and locales_counter > self.limit_locales:
+ break
+
+ # Determine which locales have failed and set the script's exit code
+ exit_status = TBPL_SUCCESS
+ for build_id in sorted(results.keys()):
+ failed_locales = []
+ for locale in sorted(results[build_id].keys()):
+ if results[build_id][locale] != 0:
+ failed_locales.append(locale)
+
+ if failed_locales:
+ if exit_status == TBPL_SUCCESS:
+ self.info('\nSUMMARY - Failed locales for {}:'.format(self.cli_script))
+ self.info('====================================================')
+ exit_status = TBPL_WARNING
+
+ self.info(build_id)
+ self.info(' {}'.format(', '.join(failed_locales)))
+
+ self.return_code = EXIT_STATUS_DICT[exit_status]
+
+
+if __name__ == '__main__':
+ myScript = ReleaseFirefoxUIUpdateTests()
+ myScript.run_and_exit()
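
read_release_update_config() above merges each 'full' release entry with its matching 'quick' entry and drops anything older than Gecko 38 before chunking. A standalone sketch of that merge step, using invented data and without the real UpdateVerifyConfig class:

    releases = [
        {'release': '37.0', 'build_id': '1', 'from': '/37.0.tar.bz2', 'locales': ['en-US']},
        {'release': '38.0', 'build_id': '2', 'from': '/38.0.tar.bz2', 'locales': ['en-US']},
        {'release': '38.0', 'build_id': '2', 'from': None, 'locales': ['de', 'fr']},
    ]

    def merge_releases(entries):
        # Filter out any releases older than Gecko 38
        entries = [r for r in entries if int(r['release'].split('.')[0]) >= 38]
        merged = []
        for rel in entries:
            if rel.get('from'):  # full entry: find the quick entry holding the remaining locales
                quick = next((q for q in entries
                              if q['build_id'] == rel['build_id'] and not q.get('from')), None)
                if quick:
                    rel = dict(rel, locales=sorted(rel['locales'] + quick['locales']))
                merged.append(rel)
        return merged

    print(merge_releases(releases))  # -> one 38.0 entry with locales ['de', 'en-US', 'fr']
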
diff --git a/testing/mozharness/scripts/fx_desktop_build.py b/testing/mozharness/scripts/fx_desktop_build.py
new file mode 100755
index 000000000..40f20442c
--- /dev/null
+++ b/testing/mozharness/scripts/fx_desktop_build.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""fx_desktop_build.py.
+
+Script harness to build nightly Firefox within Mozilla's build environment
+and on developer machines alike.
+
+author: Jordan Lund
+
+"""
+
+import copy
+import pprint
+import sys
+import os
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+import mozharness.base.script as script
+from mozharness.mozilla.building.buildbase import BUILD_BASE_CONFIG_OPTIONS, \
+ BuildingConfig, BuildOptionParser, BuildScript
+from mozharness.base.config import parse_config_file
+from mozharness.mozilla.testing.try_tools import TryToolsMixin, try_config_options
+
+
+class FxDesktopBuild(BuildScript, TryToolsMixin, object):
+ def __init__(self):
+ buildscript_kwargs = {
+ 'config_options': BUILD_BASE_CONFIG_OPTIONS + copy.deepcopy(try_config_options),
+ 'all_actions': [
+ 'get-secrets',
+ 'clobber',
+ 'clone-tools',
+ 'checkout-sources',
+ 'setup-mock',
+ 'build',
+ 'upload-files', # upload from BB to TC
+ 'sendchange',
+ 'check-test',
+ 'valgrind-test',
+ 'package-source',
+ 'generate-source-signing-manifest',
+ 'multi-l10n',
+ 'generate-build-stats',
+ 'update',
+ ],
+ 'require_config_file': True,
+ # Default configuration
+ 'config': {
+ 'is_automation': True,
+ "pgo_build": False,
+ "debug_build": False,
+ "pgo_platforms": ['linux', 'linux64', 'win32', 'win64'],
+ # nightly stuff
+ "nightly_build": False,
+ 'balrog_credentials_file': 'oauth.txt',
+ 'taskcluster_credentials_file': 'oauth.txt',
+ 'periodic_clobber': 168,
+ # hg tool stuff
+ "tools_repo": "https://hg.mozilla.org/build/tools",
+ # Seed all clones with mozilla-unified. This ensures subsequent
+ # jobs have a minimal `hg pull`.
+ "clone_upstream_url": "https://hg.mozilla.org/mozilla-unified",
+ "repo_base": "https://hg.mozilla.org",
+ 'tooltool_url': 'https://api.pub.build.mozilla.org/tooltool/',
+ "graph_selector": "/server/collect.cgi",
+ # only used for make uploadsymbols
+ 'old_packages': [
+ "%(objdir)s/dist/firefox-*",
+ "%(objdir)s/dist/fennec*",
+ "%(objdir)s/dist/seamonkey*",
+ "%(objdir)s/dist/thunderbird*",
+ "%(objdir)s/dist/install/sea/*.exe"
+ ],
+ 'stage_product': 'firefox',
+ 'platform_supports_post_upload_to_latest': True,
+ 'build_resources_path': '%(abs_src_dir)s/obj-firefox/.mozbuild/build_resources.json',
+ 'nightly_promotion_branches': ['mozilla-central', 'mozilla-aurora'],
+
+ # try will overwrite these
+ 'clone_with_purge': False,
+ 'clone_by_revision': False,
+ 'tinderbox_build_dir': None,
+ 'to_tinderbox_dated': True,
+ 'release_to_try_builds': False,
+ 'include_post_upload_builddir': False,
+ 'use_clobberer': True,
+
+ 'stage_username': 'ffxbld',
+ 'stage_ssh_key': 'ffxbld_rsa',
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ #
+
+ },
+ 'ConfigClass': BuildingConfig,
+ }
+ super(FxDesktopBuild, self).__init__(**buildscript_kwargs)
+
+ def _pre_config_lock(self, rw_config):
+ """grab buildbot props if we are running this in automation"""
+ super(FxDesktopBuild, self)._pre_config_lock(rw_config)
+ c = self.config
+ if c['is_automation']:
+ # parse buildbot config and add it to self.config
+ self.info("We are running this in buildbot, grab the build props")
+ self.read_buildbot_config()
+ ###
+ if c.get('stage_platform'):
+ platform_for_log_url = c['stage_platform']
+ if c.get('pgo_build'):
+ platform_for_log_url += '-pgo'
+ # postrun.py uses stage_platform buildbot prop as part of the log url
+ self.set_buildbot_property('stage_platform',
+ platform_for_log_url,
+ write_to_file=True)
+ else:
+ self.fatal("'stage_platform' not determined and is required in your config")
+
+ if self.try_message_has_flag('artifact'):
+ self.info('Artifact build requested in try syntax.')
+ variant = 'artifact'
+ if c.get('build_variant') in ['debug', 'cross-debug']:
+ variant = 'debug-artifact'
+ self._update_build_variant(rw_config, variant)
+
+ # helpers
+ def _update_build_variant(self, rw_config, variant='artifact'):
+ """ Intended for use in _pre_config_lock """
+ c = self.config
+ variant_cfg_path, _ = BuildOptionParser.find_variant_cfg_path(
+ '--custom-build-variant-cfg',
+ variant,
+ rw_config.config_parser
+ )
+ if not variant_cfg_path:
+ self.fatal('Could not find appropriate config file for variant %s' % variant)
+ # Update other parts of config to keep dump-config accurate
+ # Only dump-config is affected because most config info is set during
+ # initial parsing
+ variant_cfg_dict = parse_config_file(variant_cfg_path)
+ rw_config.all_cfg_files_and_dicts.append((variant_cfg_path, variant_cfg_dict))
+ c.update({
+ 'build_variant': variant,
+ 'config_files': c['config_files'] + [variant_cfg_path]
+ })
+
+ self.info("Updating self.config with the following from {}:".format(variant_cfg_path))
+ self.info(pprint.pformat(variant_cfg_dict))
+ c.update(variant_cfg_dict)
+ c['forced_artifact_build'] = True
+ # Bug 1231320 adds MOZHARNESS_ACTIONS in TaskCluster tasks to override default_actions
+ # We don't want that when forcing an artifact build.
+ if rw_config.volatile_config['actions']:
+ self.info("Updating volatile_config to include default_actions "
+ "from {}.".format(variant_cfg_path))
+ # add default actions in correct order
+ combined_actions = []
+ for a in rw_config.all_actions:
+ if a in c['default_actions'] or a in rw_config.volatile_config['actions']:
+ combined_actions.append(a)
+ rw_config.volatile_config['actions'] = combined_actions
+ self.info("Actions in volatile_config are now: {}".format(
+ rw_config.volatile_config['actions'])
+ )
+ # replace rw_config as well to set actions as in BaseScript
+ rw_config.set_config(c, overwrite=True)
+ rw_config.update_actions()
+ self.actions = tuple(rw_config.actions)
+ self.all_actions = tuple(rw_config.all_actions)
+
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ c = self.config
+ abs_dirs = super(FxDesktopBuild, self).query_abs_dirs()
+ if not c.get('app_ini_path'):
+ self.fatal('"app_ini_path" is needed in your config for this '
+ 'script.')
+
+ dirs = {
+ # BuildFactories in factory.py refer to a 'build' dir on the slave.
+ # This contains all the source code/objdir to compile. However,
+ # mozharness already has a build dir for every run, so the 'build'
+ # dir that factory refers to is named 'src' here to keep the two
+ # separate. For example, rather than having
+ # '{mozharness_repo}/build/build/', we have
+ # '{mozharness_repo}/build/src/'.
+ 'abs_src_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src'),
+ 'abs_obj_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src',
+ self._query_objdir()),
+ 'abs_tools_dir': os.path.join(abs_dirs['abs_work_dir'], 'tools'),
+ 'abs_app_ini_path': c['app_ini_path'] % {
+ 'obj_dir': os.path.join(abs_dirs['abs_work_dir'],
+ 'src',
+ self._query_objdir())
+ },
+ }
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ # Actions {{{2
+ # read_buildbot_config in BuildingMixin
+ # clobber in BuildingMixin -> PurgeMixin
+ # if Linux config:
+ # reset_mock in BuildingMixing -> MockMixin
+ # setup_mock in BuildingMixing (overrides MockMixin.mock_setup)
+
+ def set_extra_try_arguments(self, action, success=None):
+ """ Override unneeded method from TryToolsMixin """
+ pass
+
+ @script.PreScriptRun
+ def suppress_windows_modal_dialogs(self, *args, **kwargs):
+ if self._is_windows():
+ # Suppress Windows modal dialogs to avoid hangs
+ import ctypes
+ ctypes.windll.kernel32.SetErrorMode(0x8001)
+
+if __name__ == '__main__':
+ fx_desktop_build = FxDesktopBuild()
+ fx_desktop_build.run_and_exit()
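
When a try push requests an artifact build, _update_build_variant() above re-derives the action list so that default actions keep their canonical order while explicitly requested actions are preserved. The merge boils down to one ordered filter, sketched here with example action names:

    all_actions = ['clobber', 'checkout-sources', 'build', 'upload-files', 'update']
    default_actions = ['checkout-sources', 'build', 'upload-files']
    requested_actions = ['build', 'update']   # e.g. actions requested via the task

    combined_actions = [a for a in all_actions
                        if a in default_actions or a in requested_actions]
    print(combined_actions)  # -> ['checkout-sources', 'build', 'upload-files', 'update']
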
diff --git a/testing/mozharness/scripts/gaia_build_integration.py b/testing/mozharness/scripts/gaia_build_integration.py
new file mode 100755
index 000000000..32d188ffd
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_build_integration.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaBuildIntegrationTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ cmd = [
+ 'make',
+ 'build-test-integration',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1',
+ 'TRY_ENV=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=600)
+
+ output_parser.print_summary('gaia-build-integration-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_build_integration_test = GaiaBuildIntegrationTest()
+ gaia_build_integration_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_build_unit.py b/testing/mozharness/scripts/gaia_build_unit.py
new file mode 100755
index 000000000..c16ce99fa
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_build_unit.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaBuildUnitTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the gaia build unit test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ cmd = [
+ 'make',
+ 'build-test-unit',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1',
+ 'TRY_ENV=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=330)
+
+ output_parser.print_summary('gaia-build-unit-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_build_unit_test = GaiaBuildUnitTest()
+ gaia_build_unit_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_integration.py b/testing/mozharness/scripts/gaia_integration.py
new file mode 100644
index 000000000..3edb8b964
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_integration.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaIntegrationTest(GaiaTest):
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.node_setup()
+
+ output_parser = TestSummaryOutputParserHelper(
+ config=self.config, log_obj=self.log_obj, error_list=self.error_list)
+
+ # Bug 1046694 - add environment variables which govern test chunking
+ env = {}
+ if self.config.get('this_chunk') and self.config.get('total_chunks'):
+ env["PART"] = self.config.get('this_chunk')
+ env["NBPARTS"] = self.config.get('total_chunks')
+ env = self.query_env(partial_env=env)
+
+ # Bug 1137884 - marionette-js-runner needs to know about virtualenv
+ gaia_runner_service = (
+ dirs['abs_gaia_dir'] +
+ '/node_modules/marionette-js-runner/host/python/runner-service')
+ # Check whether the python package is present, since some versions of
+ # gaia depend on versions of marionette-js-runner that do not include
+ # the python runner service.
+ if os.path.exists(gaia_runner_service):
+ self.install_module('gaia-runner-service', gaia_runner_service)
+ env['VIRTUALENV_PATH'] = self.query_virtualenv_path()
+ env['HOST_LOG'] = os.path.join(dirs['abs_log_dir'], 'gecko_output.log')
+
+ cmd = [
+ 'make',
+ 'test-integration',
+ 'REPORTER=mocha-tbpl-reporter',
+ 'TEST_MANIFEST=./shared/test/integration/tbpl-manifest.json',
+ 'NODE_MODULE_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'], env=env,
+ output_parser=output_parser,
+ output_timeout=330)
+
+ output_parser.print_summary('gaia-integration-tests')
+ self.publish(code, passed=output_parser.passed, failed=output_parser.failed)
+
+if __name__ == '__main__':
+ gaia_integration_test = GaiaIntegrationTest()
+ gaia_integration_test.run_and_exit()
diff --git a/testing/mozharness/scripts/gaia_linter.py b/testing/mozharness/scripts/gaia_linter.py
new file mode 100755
index 000000000..e4441b92b
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_linter.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import re
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.log import OutputParser, ERROR
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+
+
+class GaiaLinterOutputParser(OutputParser):
+
+ JSHINT_START = "Running jshint..."
+ JSHINT_DONE = "xfailed)"
+ JSHINT_ERROR = re.compile('(.+): (.*?) \(ERROR\)')
+
+ LAST_FILE = re.compile('----- FILE : (.*?) -----')
+
+ GJSLINT_START = "Running gjslint..."
+ GJSLINT_ERROR = re.compile('Line (\d+), E:(\d+):')
+
+ GENERAL_ERRORS = (re.compile('make(.*?)\*\*\*(.*?)Error'),)
+
+ def __init__(self, **kwargs):
+ self.base_dir = kwargs.pop('base_dir')
+ super(GaiaLinterOutputParser, self).__init__(**kwargs)
+ self.in_jshint = False
+ self.in_gjslint = False
+ self.last_file = 'unknown'
+
+ def log_error(self, message, filename=None):
+ if not filename:
+ self.log('TEST-UNEXPECTED-FAIL | make lint | %s' % message)
+ else:
+ path = filename
+ if self.base_dir in path:
+ path = os.path.relpath(filename, self.base_dir)
+ self.log('TEST-UNEXPECTED-FAIL | %s | %s' % (path, message),
+ level=ERROR)
+ self.num_errors += 1
+ self.worst_log_level = self.worst_level(ERROR,
+ self.worst_log_level)
+
+ def parse_single_line(self, line):
+ if not self.in_jshint:
+ if self.JSHINT_START in line:
+ self.in_jshint = True
+ self.in_gjslint = False
+ else:
+ if self.JSHINT_DONE in line:
+ self.in_jshint = False
+
+ if not self.in_gjslint:
+ if self.GJSLINT_START in line:
+ self.in_gjslint = True
+
+ if self.in_jshint:
+ m = self.JSHINT_ERROR.search(line)
+ if m:
+ self.log_error(m.groups()[1], m.groups()[0])
+
+ if self.in_gjslint:
+ m = self.LAST_FILE.search(line)
+ if m:
+ self.last_file = m.groups()[0]
+
+ m = self.GJSLINT_ERROR.search(line)
+ if m:
+ self.log_error(line, self.last_file)
+
+ for an_error in self.GENERAL_ERRORS:
+ if an_error.search(line):
+ self.log_error(line)
+
+ if self.log_output:
+ self.info(' %s' % line)
+
+ def evaluate_parser(self):
+ # generate the TinderboxPrint line for TBPL
+ if self.num_errors:
+ self.tsummary = '<em class="testfail">%d errors</em>' % self.num_errors
+ else:
+ self.tsummary = "0 errors"
+
+ def print_summary(self, suite_name):
+ self.evaluate_parser()
+ self.info("TinderboxPrint: %s: %s\n" % (suite_name, self.tsummary))
+
+
+class GaiaIntegrationTest(GaiaTest):
+
+ virtualenv_modules = ['closure_linter==2.3.13',
+ 'python-gflags',
+ ]
+
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def run_tests(self):
+ """
+ Run the integration test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ # Copy the b2g desktop we built to the gaia directory so that it
+ # gets used by the marionette-js-runner.
+ self.copytree(
+ os.path.join(os.path.dirname(self.binary_path)),
+ os.path.join(dirs['abs_gaia_dir'], 'b2g'),
+ overwrite='clobber'
+ )
+
+ cmd = [
+ 'make',
+ 'lint',
+ 'NODE_MODULES_SRC=npm-cache',
+ 'VIRTUALENV_EXISTS=1'
+ ]
+
+ # for Mulet
+ if 'firefox' in self.binary_path:
+ cmd += ['RUNTIME=%s' % self.binary_path]
+
+ self.make_node_modules()
+
+ output_parser = GaiaLinterOutputParser(
+ base_dir=dirs['abs_gaia_dir'],
+ config=self.config,
+ log_obj=self.log_obj)
+
+ code = self.run_command(cmd, cwd=dirs['abs_gaia_dir'],
+ output_parser=output_parser,
+ output_timeout=600)
+
+ output_parser.print_summary('gaia-lint')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_integration_test = GaiaIntegrationTest()
+ gaia_integration_test.run_and_exit()
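
The regular expressions in GaiaLinterOutputParser above are easiest to read with concrete input; the lines below are made up, but show what each pattern is meant to capture:

    import re

    JSHINT_ERROR = re.compile(r'(.+): (.*?) \(ERROR\)')
    LAST_FILE = re.compile(r'----- FILE : (.*?) -----')
    GJSLINT_ERROR = re.compile(r'Line (\d+), E:(\d+):')

    print(JSHINT_ERROR.search('apps/foo/bar.js: Missing semicolon. (ERROR)').groups())
    # -> ('apps/foo/bar.js', 'Missing semicolon.')
    print(LAST_FILE.search('----- FILE : apps/foo/baz.js -----').groups())
    # -> ('apps/foo/baz.js',)
    print(bool(GJSLINT_ERROR.search('Line 12, E:0110: Line is too long')))
    # -> True
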
diff --git a/testing/mozharness/scripts/gaia_unit.py b/testing/mozharness/scripts/gaia_unit.py
new file mode 100755
index 000000000..660643b74
--- /dev/null
+++ b/testing/mozharness/scripts/gaia_unit.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import os
+import sys
+import glob
+import subprocess
+import json
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.gaia_test import GaiaTest
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+
+
+class GaiaUnitTest(GaiaTest):
+ def __init__(self, require_config_file=False):
+ GaiaTest.__init__(self, require_config_file)
+
+ def pull(self, **kwargs):
+ GaiaTest.pull(self, **kwargs)
+
+ def run_tests(self):
+ """
+ Run the unit test suite.
+ """
+ dirs = self.query_abs_dirs()
+
+ self.make_node_modules()
+
+ # make the gaia profile
+ self.make_gaia(dirs['abs_gaia_dir'],
+ self.config.get('xre_path'),
+ xre_url=self.config.get('xre_url'),
+ debug=True)
+
+ # build the testrunner command arguments
+ python = self.query_python_path('python')
+ cmd = [python, '-u', os.path.join(dirs['abs_runner_dir'],
+ 'gaia_unit_test',
+ 'main.py')]
+ executable = 'firefox'
+ if 'b2g' in self.binary_path:
+ executable = 'b2g-bin'
+
+ profile = os.path.join(dirs['abs_gaia_dir'], 'profile-debug')
+ binary = os.path.join(os.path.dirname(self.binary_path), executable)
+ cmd.extend(self._build_arg('--binary', binary))
+ cmd.extend(self._build_arg('--profile', profile))
+ cmd.extend(self._build_arg('--symbols-path', self.symbols_path))
+ cmd.extend(self._build_arg('--browser-arg', self.config.get('browser_arg')))
+
+ # Add support for chunking
+ if self.config.get('total_chunks') and self.config.get('this_chunk'):
+ chunker = [ os.path.join(dirs['abs_gaia_dir'], 'bin', 'chunk'),
+ self.config.get('total_chunks'), self.config.get('this_chunk') ]
+
+ disabled_tests = []
+ disabled_manifest = os.path.join(dirs['abs_runner_dir'],
+ 'gaia_unit_test',
+ 'disabled.json')
+ with open(disabled_manifest, 'r') as m:
+ try:
+ disabled_tests = json.loads(m.read())
+ except:
+ print "Error while decoding disabled.json; please make sure this file has valid JSON syntax."
+ sys.exit(1)
+
+ # Construct a list of all tests
+ unit_tests = []
+ for path in ('apps', 'tv_apps'):
+ test_root = os.path.join(dirs['abs_gaia_dir'], path)
+ full_paths = glob.glob(os.path.join(test_root, '*/test/unit/*_test.js'))
+ unit_tests += map(lambda x: os.path.relpath(x, test_root), full_paths)
+
+ # Remove the tests that are disabled
+ active_unit_tests = filter(lambda x: x not in disabled_tests, unit_tests)
+
+ # Chunk the list as requested
+ tests_to_run = subprocess.check_output(chunker + active_unit_tests).strip().split(' ')
+
+ cmd.extend(tests_to_run)
+
+ output_parser = TestSummaryOutputParserHelper(config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list)
+
+ upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.isdir(upload_dir):
+ self.mkdir_p(upload_dir)
+
+ env = self.query_env()
+ env['MOZ_UPLOAD_DIR'] = upload_dir
+ # I don't like this output_timeout hardcode, but bug 920153
+ code = self.run_command(cmd, env=env,
+ output_parser=output_parser,
+ output_timeout=1760)
+
+ output_parser.print_summary('gaia-unit-tests')
+ self.publish(code)
+
+if __name__ == '__main__':
+ gaia_unit_test = GaiaUnitTest()
+ gaia_unit_test.run_and_exit()
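
GaiaUnitTest.run_tests() above delegates chunking to gaia's external bin/chunk helper after filtering out disabled tests. A pure-Python approximation of that step (the real helper may distribute tests differently), with invented test paths:

    def chunk_tests(tests, disabled, total_chunks, this_chunk):
        active = [t for t in tests if t not in disabled]
        # simple round-robin split; shown only to illustrate the idea
        return active[this_chunk - 1::total_chunks]

    tests = ['a/test/unit/a_test.js', 'b/test/unit/b_test.js',
             'c/test/unit/c_test.js', 'd/test/unit/d_test.js']
    print(chunk_tests(tests, disabled=['b/test/unit/b_test.js'],
                      total_chunks=2, this_chunk=1))
    # -> ['a/test/unit/a_test.js', 'd/test/unit/d_test.js']
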
diff --git a/testing/mozharness/scripts/marionette.py b/testing/mozharness/scripts/marionette.py
new file mode 100755
index 000000000..b7f9c2765
--- /dev/null
+++ b/testing/mozharness/scripts/marionette.py
@@ -0,0 +1,358 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+
+import copy
+import os
+import re
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import TarErrorList
+from mozharness.base.log import INFO, ERROR, WARNING
+from mozharness.base.script import PreScriptAction
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.testing.errors import LogcatErrorList
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options
+from mozharness.mozilla.testing.unittest import TestSummaryOutputParserHelper
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+
+# TODO: we could remove emulator-specific code after B2G ICS emulator buildbot
+# builds are turned off, Bug 1209180.
+
+
+class MarionetteTest(TestingMixin, MercurialScript, BlobUploadMixin, TransferMixin):
+ config_options = [[
+ ["--application"],
+ {"action": "store",
+ "dest": "application",
+ "default": None,
+ "help": "application name of binary"
+ }
+ ], [
+ ["--app-arg"],
+ {"action": "store",
+ "dest": "app_arg",
+ "default": None,
+ "help": "Optional command-line argument to pass to the browser"
+ }
+ ], [
+ ["--marionette-address"],
+ {"action": "store",
+ "dest": "marionette_address",
+ "default": None,
+ "help": "The host:port of the Marionette server running inside Gecko. Unused for emulator testing",
+ }
+ ], [
+ ["--emulator"],
+ {"action": "store",
+ "type": "choice",
+ "choices": ['arm', 'x86'],
+ "dest": "emulator",
+ "default": None,
+ "help": "Use an emulator for testing",
+ }
+ ], [
+ ["--test-manifest"],
+ {"action": "store",
+ "dest": "test_manifest",
+ "default": "unit-tests.ini",
+ "help": "Path to test manifest to run relative to the Marionette "
+ "tests directory",
+ }
+ ], [
+ ["--total-chunks"],
+ {"action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks",
+ }
+ ], [
+ ["--this-chunk"],
+ {"action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk",
+ }
+ ], [
+ ["--e10s"],
+ {"action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run tests with multiple processes. (Desktop builds only)",
+ }
+ ], [
+ ["--allow-software-gl-layers"],
+ {"action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."
+ }
+ ]] + copy.deepcopy(testing_config_options) \
+ + copy.deepcopy(blobupload_config_options)
+
+ error_list = [
+ {'substr': 'FAILED (errors=', 'level': WARNING},
+ {'substr': r'''Could not successfully complete transport of message to Gecko, socket closed''', 'level': ERROR},
+ {'substr': r'''Connection to Marionette server is lost. Check gecko''', 'level': ERROR},
+ {'substr': 'Timeout waiting for marionette on port', 'level': ERROR},
+ {'regex': re.compile(r'''(TEST-UNEXPECTED|PROCESS-CRASH)'''), 'level': ERROR},
+ {'regex': re.compile(r'''(\b((?!Marionette|TestMarionette|NoSuchElement|XPathLookup|NoSuchWindow|StaleElement|ScriptTimeout|ElementNotVisible|NoSuchFrame|InvalidResponse|Javascript|Timeout|InvalidElementState|NoAlertPresent|InvalidCookieDomain|UnableToSetCookie|InvalidSelector|MoveTargetOutOfBounds)\w*)Exception)'''), 'level': ERROR},
+ ]
+
+ repos = []
+
+ def __init__(self, require_config_file=False):
+ super(MarionetteTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=['clobber',
+ 'read-buildbot-config',
+ 'pull',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests'],
+ default_actions=['clobber',
+ 'pull',
+ 'download-and-extract',
+ 'create-virtualenv',
+ 'install',
+ 'run-tests'],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ # these are necessary since self.config is read only
+ c = self.config
+ self.installer_url = c.get('installer_url')
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+
+ if c.get('structured_output'):
+ self.parser_class = StructuredOutputParser
+ else:
+ self.parser_class = TestSummaryOutputParserHelper
+
+ def _pre_config_lock(self, rw_config):
+ super(MarionetteTest, self)._pre_config_lock(rw_config)
+ if not self.config.get('emulator') and not self.config.get('marionette_address'):
+ self.fatal("You need to specify a --marionette-address for non-emulator tests! (Try --marionette-address localhost:2828 )")
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(MarionetteTest, self).query_abs_dirs()
+ dirs = {}
+ dirs['abs_test_install_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'tests')
+ dirs['abs_marionette_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'harness', 'marionette_harness')
+ dirs['abs_marionette_tests_dir'] = os.path.join(
+ dirs['abs_test_install_dir'], 'marionette', 'tests', 'testing',
+ 'marionette', 'harness', 'marionette_harness', 'tests')
+ dirs['abs_gecko_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'gecko')
+ dirs['abs_emulator_dir'] = os.path.join(
+ abs_dirs['abs_work_dir'], 'emulator')
+
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+
+ for key in dirs.keys():
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _configure_marionette_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')
+ if os.access(requirements, os.F_OK):
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+ else:
+ # XXX Bug 879765: Dependent modules need to be listed before parent
+ # modules, otherwise they will get installed from the pypi server.
+ # XXX Bug 908356: This block can be removed as soon as the
+ # in-tree requirements files propagate to all active trees.
+ mozbase_dir = os.path.join('tests', 'mozbase')
+ self.register_virtualenv_module(
+ 'manifestparser', os.path.join(mozbase_dir, 'manifestdestiny'))
+ for m in ('mozfile', 'mozlog', 'mozinfo', 'moznetwork', 'mozhttpd',
+ 'mozcrash', 'mozinstall', 'mozdevice', 'mozprofile',
+ 'mozprocess', 'mozrunner'):
+ self.register_virtualenv_module(
+ m, os.path.join(mozbase_dir, m))
+
+ self.register_virtualenv_module(
+ 'marionette', os.path.join('tests', 'marionette'))
+
+ def _get_options_group(self, is_emulator):
+ """
+ Determine which in tree options group to use and return the
+ appropriate key.
+ """
+ platform = 'emulator' if is_emulator else 'desktop'
+ # Currently running marionette on an emulator means webapi
+ # tests. This method will need to change if this does.
+ testsuite = 'webapi' if is_emulator else 'marionette'
+ return '{}_{}'.format(testsuite, platform)
+
+ def download_and_extract(self):
+ super(MarionetteTest, self).download_and_extract()
+
+ if self.config.get('emulator'):
+ dirs = self.query_abs_dirs()
+
+ self.mkdir_p(dirs['abs_emulator_dir'])
+ tar = self.query_exe('tar', return_type='list')
+ self.run_command(tar + ['zxf', self.installer_path],
+ cwd=dirs['abs_emulator_dir'],
+ error_list=TarErrorList,
+ halt_on_failure=True, fatal_exit_code=3)
+
+ def install(self):
+ if self.config.get('emulator'):
+ self.info("Emulator tests; skipping.")
+ else:
+ super(MarionetteTest, self).install()
+
+ def run_tests(self):
+ """
+ Run the Marionette tests
+ """
+ dirs = self.query_abs_dirs()
+
+ raw_log_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'marionette_raw.log')
+ error_summary_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'marionette_errorsummary.log')
+ html_report_file = os.path.join(dirs['abs_blob_upload_dir'],
+ 'report.html')
+
+ config_fmt_args = {
+ # emulator builds require a longer timeout
+ 'timeout': 60000 if self.config.get('emulator') else 10000,
+ 'profile': os.path.join(dirs['abs_work_dir'], 'profile'),
+ 'xml_output': os.path.join(dirs['abs_work_dir'], 'output.xml'),
+ 'html_output': os.path.join(dirs['abs_blob_upload_dir'], 'output.html'),
+ 'logcat_dir': dirs['abs_work_dir'],
+ 'emulator': 'arm',
+ 'symbols_path': self.symbols_path,
+ 'binary': self.binary_path,
+ 'address': self.config.get('marionette_address'),
+ 'raw_log_file': raw_log_file,
+ 'error_summary_file': error_summary_file,
+ 'html_report_file': html_report_file,
+ 'gecko_log': dirs["abs_blob_upload_dir"],
+ 'this_chunk': self.config.get('this_chunk', 1),
+ 'total_chunks': self.config.get('total_chunks', 1)
+ }
+
+ self.info("The emulator type: %s" % config_fmt_args["emulator"])
+ # build the marionette command arguments
+ python = self.query_python_path('python')
+
+ cmd = [python, '-u', os.path.join(dirs['abs_marionette_dir'],
+ 'runtests.py')]
+
+ manifest = os.path.join(dirs['abs_marionette_tests_dir'],
+ self.config['test_manifest'])
+
+ if self.config.get('app_arg'):
+ config_fmt_args['app_arg'] = self.config['app_arg']
+
+ if not self.config['e10s']:
+ cmd.append('--disable-e10s')
+
+ cmd.append('--gecko-log=%s' % os.path.join(dirs["abs_blob_upload_dir"],
+ 'gecko.log'))
+
+ if self.config.get("structured_output"):
+ cmd.append("--log-raw=-")
+
+ options_group = self._get_options_group(self.config.get('emulator'))
+
+ if options_group not in self.config["suite_definitions"]:
+ self.fatal("%s is not defined in the config!" % options_group)
+
+ for s in self.config["suite_definitions"][options_group]["options"]:
+ cmd.append(s % config_fmt_args)
+
+ if self.mkdir_p(dirs["abs_blob_upload_dir"]) == -1:
+ # Make sure that the logging directory exists
+ self.fatal("Could not create blobber upload directory")
+
+ cmd.append(manifest)
+
+ try_options, try_tests = self.try_args("marionette")
+ cmd.extend(self.query_tests_args(try_tests,
+ str_format_values=config_fmt_args))
+
+ env = {}
+ if self.query_minidump_stackwalk():
+ env['MINIDUMP_STACKWALK'] = self.minidump_stackwalk_path
+ env['MOZ_UPLOAD_DIR'] = self.query_abs_dirs()['abs_blob_upload_dir']
+ env['MINIDUMP_SAVE_PATH'] = self.query_abs_dirs()['abs_blob_upload_dir']
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ if not os.path.isdir(env['MOZ_UPLOAD_DIR']):
+ self.mkdir_p(env['MOZ_UPLOAD_DIR'])
+ env = self.query_env(partial_env=env)
+
+ marionette_parser = self.parser_class(config=self.config,
+ log_obj=self.log_obj,
+ error_list=self.error_list,
+ strict=False)
+ return_code = self.run_command(cmd, env=env,
+ output_timeout=1000,
+ output_parser=marionette_parser)
+ level = INFO
+ tbpl_status, log_level = marionette_parser.evaluate_parser(
+ return_code=return_code)
+ marionette_parser.append_tinderboxprint_line("marionette")
+
+ qemu = os.path.join(dirs['abs_work_dir'], 'qemu.log')
+ if os.path.isfile(qemu):
+ self.copyfile(qemu, os.path.join(dirs['abs_blob_upload_dir'],
+ 'qemu.log'))
+
+ # dump logcat output if there were failures
+ if self.config.get('emulator'):
+ if marionette_parser.failed != "0" or 'T-FAIL' in marionette_parser.tsummary:
+ logcat = os.path.join(dirs['abs_work_dir'], 'emulator-5554.log')
+ if os.access(logcat, os.F_OK):
+ self.info('dumping logcat')
+ self.run_command(['cat', logcat], error_list=LogcatErrorList)
+ else:
+ self.info('no logcat file found')
+ else:
+ # .. or gecko.log if it exists
+ gecko_log = os.path.join(self.config['base_work_dir'], 'gecko.log')
+ if os.access(gecko_log, os.F_OK):
+ self.info('dumping gecko.log')
+ self.run_command(['cat', gecko_log])
+ self.rmtree(gecko_log)
+ else:
+ self.info('gecko.log not found')
+
+ marionette_parser.print_summary('marionette')
+
+ self.log("Marionette exited with return code %s: %s" % (return_code, tbpl_status),
+ level=level)
+ self.buildbot_status(tbpl_status)
+
+
+if __name__ == '__main__':
+ marionetteTest = MarionetteTest()
+ marionetteTest.run_and_exit()
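
MarionetteTest.run_tests() above expands each option string from the in-tree suite_definitions with old-style %-formatting against config_fmt_args. A tiny sketch with invented option strings and paths:

    config_fmt_args = {
        'binary': '/path/to/firefox',
        'address': 'localhost:2828',
        'symbols_path': '/path/to/symbols',
    }
    options = ['--binary=%(binary)s',
               '--address=%(address)s',
               '--symbols-path=%(symbols_path)s']

    cmd = ['python', '-u', 'runtests.py'] + [s % config_fmt_args for s in options]
    print(cmd)
    # -> ['python', '-u', 'runtests.py', '--binary=/path/to/firefox', ...]
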
diff --git a/testing/mozharness/scripts/marionette_harness_tests.py b/testing/mozharness/scripts/marionette_harness_tests.py
new file mode 100644
index 000000000..0811bef9c
--- /dev/null
+++ b/testing/mozharness/scripts/marionette_harness_tests.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+import copy
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.python import PreScriptAction
+from mozharness.base.python import (
+ VirtualenvMixin,
+ virtualenv_config_options,
+)
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import (
+ BuildbotMixin, TBPL_SUCCESS, TBPL_WARNING, TBPL_FAILURE,
+ TBPL_EXCEPTION
+)
+
+marionette_harness_tests_config_options = [
+ [['--tests'], {
+ 'dest': 'test_path',
+ 'default': None,
+ 'help': 'Path to test_*.py or directory relative to src root.',
+ }],
+ [['--src-dir'], {
+ 'dest': 'rel_src_dir',
+ 'default': None,
+ 'help': 'Path to hg.mo source checkout relative to work dir.',
+ }],
+
+] + copy.deepcopy(virtualenv_config_options)
+
+marionette_harness_tests_config = {
+ "find_links": [
+ "http://pypi.pub.build.mozilla.org/pub",
+ ],
+ "pip_index": False,
+ # relative to workspace
+ "rel_src_dir": os.path.join("build", "src"),
+}
+
+class MarionetteHarnessTests(VirtualenvMixin, BuildbotMixin, BaseScript):
+
+ def __init__(self, config_options=None,
+ all_actions=None, default_actions=None,
+ *args, **kwargs):
+ config_options = config_options or marionette_harness_tests_config_options
+ actions = [
+ 'clobber',
+ 'create-virtualenv',
+ 'run-tests',
+ ]
+ super(MarionetteHarnessTests, self).__init__(
+ config_options=config_options,
+ all_actions=all_actions or actions,
+ default_actions=default_actions or actions,
+ config=marionette_harness_tests_config,
+ *args, **kwargs)
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+ c = self.config
+ requirements = os.path.join(
+ dirs['abs_src_dir'],
+ 'testing', 'config',
+ 'marionette_harness_test_requirements.txt'
+ )
+ self.register_virtualenv_module(
+ requirements=[requirements],
+ two_pass=True
+ )
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ c = self.config
+ abs_dirs = super(MarionetteHarnessTests, self).query_abs_dirs()
+ dirs = {
+ 'abs_src_dir': os.path.abspath(
+ os.path.join(abs_dirs['base_work_dir'], c['rel_src_dir'])
+ ),
+ }
+
+ for key in dirs:
+ if key not in abs_dirs:
+ abs_dirs[key] = dirs[key]
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def _get_pytest_status(self, code):
+ """
+ Translate pytest exit code to TH status
+
+ Based on https://github.com/pytest-dev/pytest/blob/master/_pytest/main.py#L21-L26
+ """
+ if code == 0:
+ return TBPL_SUCCESS
+ elif code == 1:
+ return TBPL_WARNING
+ elif 1 < code < 6:
+ self.error("pytest returned exit code: %s" % code)
+ return TBPL_FAILURE
+ else:
+ return TBPL_EXCEPTION
+
+ def run_tests(self):
+ """Run all the tests"""
+ dirs = self.query_abs_dirs()
+ test_relpath = self.config.get(
+ 'test_path',
+ os.path.join('testing', 'marionette',
+ 'harness', 'marionette_harness', 'tests',
+ 'harness_unit')
+ )
+ test_path = os.path.join(dirs['abs_src_dir'], test_relpath)
+ self.activate_virtualenv()
+ import pytest
+        command = ['-p', 'no:terminalreporter',  # disable pytest's own console output
+ test_path]
+ logs = {}
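+        # The --log-tbpl/--log-mach/--log-raw arguments added below are assumed
+        # to be provided by mozlog's pytest plugin (pulled in via the harness
+        # test requirements); each produces a structured log in that format.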
+ for fmt in ['tbpl', 'mach', 'raw']:
+ logs[fmt] = os.path.join(dirs['abs_log_dir'],
+ 'mn-harness_{}.log'.format(fmt))
+ command.extend(['--log-'+fmt, logs[fmt]])
+ self.info('Calling pytest.main with the following arguments: %s' % command)
+ status = self._get_pytest_status(pytest.main(command))
+ self.read_from_file(logs['tbpl'])
+ for log in logs.values():
+ self.copy_to_upload_dir(log, dest='logs/')
+ self.buildbot_status(status)
+
+
+if __name__ == '__main__':
+ script = MarionetteHarnessTests()
+ script.run_and_exit()
diff --git a/testing/mozharness/scripts/merge_day/gecko_migration.py b/testing/mozharness/scripts/merge_day/gecko_migration.py
new file mode 100755
index 000000000..7208630e0
--- /dev/null
+++ b/testing/mozharness/scripts/merge_day/gecko_migration.py
@@ -0,0 +1,545 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" gecko_migration.py
+
+Merge day script for gecko (mozilla-central -> mozilla-aurora,
+mozilla-aurora -> mozilla-beta, mozilla-beta -> mozilla-release).
+
+Ported largely from
+http://hg.mozilla.org/build/tools/file/084bc4e2fc76/release/beta2release.py
+and
+http://hg.mozilla.org/build/tools/file/084bc4e2fc76/release/merge_helper.py
+"""
+
+import os
+import pprint
+import subprocess
+import sys
+from getpass import getpass
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.errors import HgErrorList
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.selfserve import SelfServeMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+
+VALID_MIGRATION_BEHAVIORS = (
+ "beta_to_release", "aurora_to_beta", "central_to_aurora", "release_to_esr",
+ "bump_second_digit",
+)
+
+
+# GeckoMigration {{{1
+class GeckoMigration(MercurialScript, BalrogMixin, VirtualenvMixin,
+ SelfServeMixin, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--balrog-api-root', ], {
+ "action": "store",
+ "dest": "balrog_api_root",
+ "type": "string",
+ "help": "Specify Balrog API root URL.",
+ }],
+ [['--balrog-username', ], {
+ "action": "store",
+ "dest": "balrog_username",
+ "type": "string",
+ "help": "Specify what user to connect to Balrog with.",
+ }],
+ [['--balrog-credentials-file', ], {
+ "action": "store",
+ "dest": "balrog_credentials_file",
+ "type": "string",
+ "help": "The file containing the Balrog credentials.",
+ }],
+ [['--remove-locale', ], {
+ "action": "extend",
+ "dest": "remove_locales",
+ "type": "string",
+ "help": "Comma separated list of locales to remove from the 'to' repo.",
+ }],
+ ]
+ gecko_repos = None
+
+ def __init__(self, require_config_file=True):
+ super(GeckoMigration, self).__init__(
+ config_options=virtualenv_config_options + self.config_options,
+ all_actions=[
+ 'clobber',
+ 'create-virtualenv',
+ 'clean-repos',
+ 'pull',
+ 'lock-update-paths',
+ 'migrate',
+ 'bump_second_digit',
+ 'commit-changes',
+ 'push',
+ 'trigger-builders',
+ ],
+ default_actions=[
+ 'clean-repos',
+ 'pull',
+ 'migrate',
+ ],
+ require_config_file=require_config_file
+ )
+ self.run_sanity_check()
+
+# Helper methods {{{1
+ def run_sanity_check(self):
+ """ Verify the configs look sane before proceeding.
+ """
+ message = ""
+ if self.config['migration_behavior'] not in VALID_MIGRATION_BEHAVIORS:
+ message += "%s must be one of %s!\n" % (self.config['migration_behavior'], VALID_MIGRATION_BEHAVIORS)
+ if self.config['migration_behavior'] == 'beta_to_release':
+ if self.config.get("require_remove_locales") and not self.config.get("remove_locales") and 'migrate' in self.actions:
+ message += "You must specify --remove-locale!\n"
+ else:
+ if self.config.get("require_remove_locales") or self.config.get("remove_locales"):
+ self.warning("--remove-locale isn't valid unless you're using beta_to_release migration_behavior!\n")
+ if message:
+ self.fatal(message)
+
+ def query_abs_dirs(self):
+ """ Allow for abs_from_dir and abs_to_dir
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ dirs = super(GeckoMigration, self).query_abs_dirs()
+ self.abs_dirs['abs_tools_dir'] = os.path.join(
+ dirs['abs_work_dir'], 'tools'
+ )
+ self.abs_dirs['abs_tools_lib_dir'] = os.path.join(
+ dirs['abs_work_dir'], 'tools', 'lib', 'python'
+ )
+ for k in ('from', 'to'):
+ url = self.config.get("%s_repo_url" % k)
+ if url:
+ dir_name = self.get_filename_from_url(url)
+ self.info("adding %s" % dir_name)
+ self.abs_dirs['abs_%s_dir' % k] = os.path.join(
+ dirs['abs_work_dir'], dir_name
+ )
+ return self.abs_dirs
+
+ def query_repos(self):
+ """ Build a list of repos to clone.
+ """
+ if self.gecko_repos:
+ return self.gecko_repos
+ self.info("Building gecko_repos list...")
+ dirs = self.query_abs_dirs()
+ self.gecko_repos = []
+ for k in ('from', 'to'):
+ repo_key = "%s_repo_url" % k
+ url = self.config.get(repo_key)
+ if url:
+ self.gecko_repos.append({
+ "repo": url,
+ "branch": self.config.get("%s_repo_branch" % (k,), "default"),
+ "dest": dirs['abs_%s_dir' % k],
+ "vcs": "hg",
+ # "hg" vcs uses robustcheckout extension requires the use of a share
+ # but having a share breaks migration logic when merging repos.
+ # Solution: tell hg vcs to create a unique share directory for each
+ # gecko repo. see mozharness/base/vcs/mercurial.py for implementation
+ "use_vcs_unique_share": True,
+ })
+ else:
+ self.warning("Skipping %s" % repo_key)
+ self.info(pprint.pformat(self.gecko_repos))
+ return self.gecko_repos
+
+ def query_commit_dirs(self):
+ dirs = self.query_abs_dirs()
+ commit_dirs = [dirs['abs_to_dir']]
+ if self.config['migration_behavior'] == 'central_to_aurora':
+ commit_dirs.append(dirs['abs_from_dir'])
+ return commit_dirs
+
+ def query_commit_message(self):
+ return "Update configs. IGNORE BROKEN CHANGESETS CLOSED TREE NO BUG a=release ba=release"
+
+ def query_push_dirs(self):
+ dirs = self.query_abs_dirs()
+ return dirs.get('abs_from_dir'), dirs.get('abs_to_dir')
+
+ def query_push_args(self, cwd):
+ if cwd == self.query_abs_dirs()['abs_to_dir'] and \
+ self.config['migration_behavior'] == 'beta_to_release':
+ return ['--new-branch', '-r', '.']
+ else:
+ return ['-r', '.']
+
+ def query_from_revision(self):
+ """ Shortcut to get the revision for the from repo
+ """
+ dirs = self.query_abs_dirs()
+ return self.query_hg_revision(dirs['abs_from_dir'])
+
+ def query_to_revision(self):
+ """ Shortcut to get the revision for the to repo
+ """
+ dirs = self.query_abs_dirs()
+ return self.query_hg_revision(dirs['abs_to_dir'])
+
+ def hg_merge_via_debugsetparents(self, cwd, old_head, new_head,
+ preserve_tags=True, user=None):
+ """ Merge 2 heads avoiding non-fastforward commits
+ """
+ hg = self.query_exe('hg', return_type='list')
+ cmd = hg + ['debugsetparents', new_head, old_head]
+ self.run_command(cmd, cwd=cwd, error_list=HgErrorList,
+ halt_on_failure=True)
+ self.hg_commit(
+ cwd,
+ message="Merge old head via |hg debugsetparents %s %s|. "
+ "CLOSED TREE DONTBUILD a=release" % (new_head, old_head),
+ user=user
+ )
+ if preserve_tags:
+ # I don't know how to do this elegantly.
+ # I'm reverting .hgtags to old_head, then appending the new tags
+ # from new_head to .hgtags, and hoping nothing goes wrong.
+ # I'd rather not write patch files from scratch, so this seems
+ # like a slightly more complex but less objectionable method?
+ self.info("Trying to preserve tags from before debugsetparents...")
+ dirs = self.query_abs_dirs()
+ patch_file = os.path.join(dirs['abs_work_dir'], 'patch_file')
+ self.run_command(
+ subprocess.list2cmdline(hg + ['diff', '-r', old_head, '.hgtags', '-U9', '>', patch_file]),
+ cwd=cwd,
+ )
+ self.run_command(
+ ['patch', '-R', '-p1', '-i', patch_file],
+ cwd=cwd,
+ halt_on_failure=True,
+ )
+ tag_diff = self.read_from_file(patch_file)
+ with self.opened(os.path.join(cwd, '.hgtags'), open_mode='a') as (fh, err):
+ if err:
+ self.fatal("Can't append to .hgtags!")
+ for n, line in enumerate(tag_diff.splitlines()):
+                    # The first few lines of the patch are headers, so we skip them.
+ if n < 5:
+ continue
+ # Even after that, the only lines we really care about are
+ # additions to the file.
+ # TODO: why do we only care about additions? I couldn't
+ # figure that out by reading this code.
+ if not line.startswith('+'):
+ continue
+ line = line.replace('+', '')
+ (changeset, tag) = line.split(' ')
+ if len(changeset) != 40:
+ continue
+ fh.write("%s\n" % line)
+ out = self.get_output_from_command(['hg', 'status', '.hgtags'],
+ cwd=cwd)
+ if out:
+ self.hg_commit(
+ cwd,
+ message="Preserve old tags after debugsetparents. "
+ "CLOSED TREE DONTBUILD a=release",
+ user=user,
+ )
+ else:
+ self.info(".hgtags file is identical, no need to commit")
+
+ def remove_locales(self, file_name, locales):
+ """ Remove locales from shipped-locales (m-r only)
+ """
+ contents = self.read_from_file(file_name)
+ new_contents = ""
+ for line in contents.splitlines():
+ locale = line.split()[0]
+ if locale not in locales:
+ new_contents += "%s\n" % line
+ else:
+ self.info("Removed locale: %s" % locale)
+ self.write_to_file(file_name, new_contents)
+
+ def touch_clobber_file(self, cwd):
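+        # Rewrite CLOBBER so it keeps only its comment/blank lines and ends with
+        # a "Merge day clobber" marker, forcing a clobber on the next build.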
+ clobber_file = os.path.join(cwd, 'CLOBBER')
+ contents = self.read_from_file(clobber_file)
+ new_contents = ""
+ for line in contents.splitlines():
+ line = line.strip()
+ if line.startswith("#") or line == '':
+ new_contents += "%s\n" % line
+ new_contents += "Merge day clobber"
+ self.write_to_file(clobber_file, new_contents)
+
+ def bump_version(self, cwd, curr_version, next_version, curr_suffix,
+ next_suffix, bump_major=False):
+ """ Bump versions (m-c, m-a, m-b).
+
+        At some point we may want to move these hardcoded filenames into the config.
+ """
+ curr_weave_version = str(int(curr_version) + 2)
+ next_weave_version = str(int(curr_weave_version) + 1)
+ for f in self.config["version_files"]:
+ from_ = "%s.0%s" % (curr_version, curr_suffix)
+ to = "%s.0%s%s" % (next_version, next_suffix, f["suffix"])
+ self.replace(os.path.join(cwd, f["file"]), from_, to)
+
+ # only applicable for m-c
+ if bump_major:
+ self.replace(
+ os.path.join(cwd, "xpcom/components/Module.h"),
+ "static const unsigned int kVersion = %s;" % curr_version,
+ "static const unsigned int kVersion = %s;" % next_version
+ )
+ self.replace(
+ os.path.join(cwd, "services/sync/moz.build"),
+ "DEFINES['weave_version'] = '1.%s.0'" % curr_weave_version,
+ "DEFINES['weave_version'] = '1.%s.0'" % next_weave_version
+ )
+
+ # Branch-specific workflow helper methods {{{1
+ def central_to_aurora(self, end_tag):
+ """ mozilla-central -> mozilla-aurora behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ self.info("Reverting locales")
+ hg = self.query_exe("hg", return_type="list")
+ for f in self.config["locale_files"]:
+ self.run_command(
+ hg + ["revert", "-r", end_tag, f],
+ cwd=dirs['abs_to_dir'],
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+ next_ma_version = self.get_version(dirs['abs_to_dir'])[0]
+ self.bump_version(dirs['abs_to_dir'], next_ma_version, next_ma_version, "a1", "a2")
+ self.apply_replacements()
+ # bump m-c version
+ curr_mc_version = self.get_version(dirs['abs_from_dir'])[0]
+ next_mc_version = str(int(curr_mc_version) + 1)
+ self.bump_version(
+ dirs['abs_from_dir'], curr_mc_version, next_mc_version, "a1", "a1",
+ bump_major=True
+ )
+ # touch clobber files
+ self.touch_clobber_file(dirs['abs_from_dir'])
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def aurora_to_beta(self, *args, **kwargs):
+ """ mozilla-aurora -> mozilla-beta behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ mb_version = self.get_version(dirs['abs_to_dir'])[0]
+ self.bump_version(dirs['abs_to_dir'], mb_version, mb_version, "a2", "")
+ self.apply_replacements()
+ self.touch_clobber_file(dirs['abs_to_dir'])
+ # TODO mozconfig diffing
+ # The build/tools version only checks the mozconfigs from hgweb, so
+ # can't help pre-push. The in-tree mozconfig diffing requires a mach
+ # virtualenv to be installed. If we want this sooner we can put this
+ # in the push action; otherwise we may just wait until we have in-tree
+ # mozconfig checking.
+
+ def beta_to_release(self, *args, **kwargs):
+ """ mozilla-beta -> mozilla-release behavior.
+
+ We could have all of these individually toggled by flags, but
+ by separating into workflow methods we can be more precise about
+ what happens in each workflow, while allowing for things like
+ staging beta user repo migrations.
+ """
+ dirs = self.query_abs_dirs()
+ # Reset display_version.txt
+ for f in self.config["copy_files"]:
+ self.copyfile(
+ os.path.join(dirs['abs_to_dir'], f["src"]),
+ os.path.join(dirs['abs_to_dir'], f["dst"]))
+
+ self.apply_replacements()
+ if self.config.get("remove_locales"):
+ self.remove_locales(
+ os.path.join(dirs['abs_to_dir'], "browser/locales/shipped-locales"),
+ self.config['remove_locales']
+ )
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def release_to_esr(self, *args, **kwargs):
+ """ mozilla-release -> mozilla-esrNN behavior. """
+ dirs = self.query_abs_dirs()
+ for to_transplant in self.config.get("transplant_patches", []):
+ self.transplant(repo=to_transplant["repo"],
+ changeset=to_transplant["changeset"],
+ cwd=dirs['abs_to_dir'])
+ self.apply_replacements()
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def apply_replacements(self):
+ dirs = self.query_abs_dirs()
+ for f, from_, to in self.config["replacements"]:
+ self.replace(os.path.join(dirs['abs_to_dir'], f), from_, to)
+
+ def transplant(self, repo, changeset, cwd):
+ """Transplant a Mercurial changeset from a remote repository."""
+ hg = self.query_exe("hg", return_type="list")
+ cmd = hg + ["--config", "extensions.transplant=", "transplant",
+ "--source", repo, changeset]
+ self.info("Transplanting %s from %s" % (changeset, repo))
+ status = self.run_command(
+ cmd,
+ cwd=cwd,
+ error_list=HgErrorList,
+ )
+ if status != 0:
+ self.fatal("Cannot transplant %s from %s properly" %
+ (changeset, repo))
+
+ def pull_from_repo(self, from_dir, to_dir, revision=None, branch=None):
+ """ Pull from one repo to another. """
+ hg = self.query_exe("hg", return_type="list")
+ cmd = hg + ["pull"]
+ if revision:
+ cmd.extend(["-r", revision])
+ cmd.append(from_dir)
+ self.run_command(
+ cmd,
+ cwd=to_dir,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+ cmd = hg + ["update", "-C"]
+ if branch or revision:
+ cmd.extend(["-r", branch or revision])
+ self.run_command(
+ cmd,
+ cwd=to_dir,
+ error_list=HgErrorList,
+ halt_on_failure=True,
+ )
+
+# Actions {{{1
+ def bump_second_digit(self, *args, **kwargs):
+ """Bump second digit.
+
+        ESR needs only the second digit bumped as part of merge day."""
+ dirs = self.query_abs_dirs()
+ version = self.get_version(dirs['abs_to_dir'])
+ curr_version = ".".join(version)
+ next_version = list(version)
+ # bump the second digit
+ next_version[1] = str(int(next_version[1]) + 1)
+        # Take major+minor and append '0' according to the Firefox version scheme.
+ # 52.0 will become 52.1.0, not 52.1
+ next_version = ".".join(next_version[:2] + ['0'])
+ for f in self.config["version_files"]:
+ self.replace(os.path.join(dirs['abs_to_dir'], f["file"]),
+ curr_version, next_version + f["suffix"])
+ self.touch_clobber_file(dirs['abs_to_dir'])
+
+ def pull(self):
+ """ Pull tools first, then clone the gecko repos
+ """
+ repos = [{
+ "repo": self.config["tools_repo_url"],
+ "branch": self.config["tools_repo_branch"],
+ "dest": "tools",
+ "vcs": "hg",
+ }] + self.query_repos()
+ super(GeckoMigration, self).pull(repos=repos)
+
+ def lock_update_paths(self):
+ self.lock_balrog_rules(self.config["balrog_rules_to_lock"])
+
+ def migrate(self):
+ """ Perform the migration.
+ """
+ dirs = self.query_abs_dirs()
+ from_fx_major_version = self.get_version(dirs['abs_from_dir'])[0]
+ to_fx_major_version = self.get_version(dirs['abs_to_dir'])[0]
+ base_from_rev = self.query_from_revision()
+ base_to_rev = self.query_to_revision()
+ base_tag = self.config['base_tag'] % {'major_version': from_fx_major_version}
+ end_tag = self.config['end_tag'] % {'major_version': to_fx_major_version}
+ self.hg_tag(
+ dirs['abs_from_dir'], base_tag, user=self.config['hg_user'],
+ revision=base_from_rev,
+ )
+ new_from_rev = self.query_from_revision()
+ self.info("New revision %s" % new_from_rev)
+ pull_revision = None
+ if not self.config.get("pull_all_branches"):
+ pull_revision = new_from_rev
+ self.pull_from_repo(
+ dirs['abs_from_dir'], dirs['abs_to_dir'],
+ revision=pull_revision,
+ branch="default",
+ )
+ if self.config.get("requires_head_merge") is not False:
+ self.hg_merge_via_debugsetparents(
+ dirs['abs_to_dir'], old_head=base_to_rev, new_head=new_from_rev,
+ user=self.config['hg_user'],
+ )
+ self.hg_tag(
+ dirs['abs_to_dir'], end_tag, user=self.config['hg_user'],
+ revision=base_to_rev, force=True,
+ )
+ # Call beta_to_release etc.
+ if not hasattr(self, self.config['migration_behavior']):
+ self.fatal("Don't know how to proceed with migration_behavior %s !" % self.config['migration_behavior'])
+ getattr(self, self.config['migration_behavior'])(end_tag=end_tag)
+ self.info("Verify the diff, and apply any manual changes, such as disabling features, and --commit-changes")
+
+ def trigger_builders(self):
+ """Triggers builders that should be run directly after a merge.
+ There are two different types of things we trigger:
+ 1) Nightly builds ("post_merge_nightly_branches" in the config).
+ These are triggered with buildapi's nightly build endpoint to avoid
+ duplicating all of the nightly builder names into the gecko
+ migration mozharness configs. (Which would surely get out of date
+ very quickly).
+ 2) Arbitrary builders ("post_merge_builders"). These are additional
+ builders to trigger that aren't part of the nightly builder set.
+ Previous example: hg bundle generation builders.
+ """
+ dirs = self.query_abs_dirs()
+ branch = self.config["to_repo_url"].rstrip("/").split("/")[-1]
+ revision = self.query_to_revision()
+ # Horrible hack because our internal buildapi interface doesn't let us
+ # actually do anything. Need to use the public one w/ auth.
+ username = raw_input("LDAP Username: ")
+ password = getpass(prompt="LDAP Password: ")
+ auth = (username, password)
+ for builder in self.config["post_merge_builders"]:
+ self.trigger_arbitrary_job(builder, branch, revision, auth)
+ for nightly_branch in self.config["post_merge_nightly_branches"]:
+ nightly_revision = self.query_hg_revision(os.path.join(dirs["abs_work_dir"], nightly_branch))
+ self.trigger_nightly_builds(nightly_branch, nightly_revision, auth)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ GeckoMigration().run_and_exit()
diff --git a/testing/mozharness/scripts/mobile_l10n.py b/testing/mozharness/scripts/mobile_l10n.py
new file mode 100755
index 000000000..cbac6fa67
--- /dev/null
+++ b/testing/mozharness/scripts/mobile_l10n.py
@@ -0,0 +1,714 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""mobile_l10n.py
+
+This currently supports nightly and release single locale repacks for
+Android. This also creates nightly updates.
+"""
+
+from copy import deepcopy
+import os
+import re
+import subprocess
+import sys
+import time
+import shlex
+
+try:
+ import simplejson as json
+ assert json
+except ImportError:
+ import json
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import BaseErrorList, MakefileErrorList
+from mozharness.base.log import OutputParser
+from mozharness.base.transfer import TransferMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import MobileSigningMixin
+from mozharness.mozilla.tooltool import TooltoolMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.updates.balrog import BalrogMixin
+from mozharness.base.python import VirtualenvMixin
+from mozharness.mozilla.taskcluster_helper import Taskcluster
+
+
+# MobileSingleLocale {{{1
+class MobileSingleLocale(MockMixin, LocalesMixin, ReleaseMixin,
+ MobileSigningMixin, TransferMixin, TooltoolMixin,
+ BuildbotMixin, PurgeMixin, MercurialScript, BalrogMixin,
+ VirtualenvMixin):
+ config_options = [[
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to sign and update"
+ }
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a file to determine which locales to sign and update"
+ }
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"
+ }
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"
+ }
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"
+ }
+ ], [
+ ['--key-alias', ],
+ {"action": "store",
+ "dest": "key_alias",
+ "type": "choice",
+ "default": "nightly",
+ "choices": ["nightly", "release"],
+ "help": "Specify the signing key alias"
+ }
+ ], [
+ ['--this-chunk', ],
+ {"action": "store",
+ "dest": "this_locale_chunk",
+ "type": "int",
+ "help": "Specify which chunk of locales to run"
+ }
+ ], [
+ ['--total-chunks', ],
+ {"action": "store",
+ "dest": "total_locale_chunks",
+ "type": "int",
+ "help": "Specify the total number of chunks of locales"
+ }
+ ], [
+ ["--disable-mock"],
+ {"dest": "disable_mock",
+ "action": "store_true",
+ "help": "do not run under mock despite what gecko-config says",
+ }
+ ], [
+ ['--revision', ],
+ {"action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "Override the gecko revision to use (otherwise use buildbot supplied"
+ " value, or en-US revision) "}
+ ]]
+
+ def __init__(self, require_config_file=True):
+ buildscript_kwargs = {
+ 'all_actions': [
+ "clobber",
+ "pull",
+ "clone-locales",
+ "list-locales",
+ "setup",
+ "repack",
+ "validate-repacks-signed",
+ "upload-repacks",
+ "create-virtualenv",
+ "taskcluster-upload",
+ "submit-to-balrog",
+ "summary",
+ ],
+ 'config': {
+ 'taskcluster_credentials_file': 'oauth.txt',
+ 'virtualenv_modules': [
+ 'requests==2.8.1',
+ 'PyHawk-with-a-single-extra-commit==0.1.5',
+ 'taskcluster==0.0.26',
+ ],
+ 'virtualenv_path': 'venv',
+ },
+ }
+ LocalesMixin.__init__(self)
+ MercurialScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ **buildscript_kwargs
+ )
+ self.base_package_name = None
+ self.buildid = None
+ self.make_ident_output = None
+ self.repack_env = None
+ self.revision = None
+ self.upload_env = None
+ self.version = None
+ self.upload_urls = {}
+ self.locales_property = {}
+
+ # Helper methods {{{2
+ def query_repack_env(self):
+ if self.repack_env:
+ return self.repack_env
+ c = self.config
+ replace_dict = {}
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ replace_dict = {
+ 'version': rc['version'],
+ 'buildnum': rc['buildnum']
+ }
+ repack_env = self.query_env(partial_env=c.get("repack_env"),
+ replace_dict=replace_dict)
+ if c.get('base_en_us_binary_url') and c.get('release_config_file'):
+ rc = self.query_release_config()
+ repack_env['EN_US_BINARY_URL'] = c['base_en_us_binary_url'] % replace_dict
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ repack_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd(formats=['jar']))
+ self.repack_env = repack_env
+ return self.repack_env
+
+ def query_l10n_env(self):
+ return self.query_env()
+
+ def query_upload_env(self):
+ if self.upload_env:
+ return self.upload_env
+ c = self.config
+ replace_dict = {
+ 'buildid': self.query_buildid(),
+ 'version': self.query_version(),
+ }
+ replace_dict.update(c)
+
+ # Android l10n builds use a non-standard location for l10n files. Other
+ # builds go to 'mozilla-central-l10n', while android builds add part of
+ # the platform name as well, like 'mozilla-central-android-api-15-l10n'.
+ # So we override the branch with something that contains the platform
+ # name.
+ replace_dict['branch'] = c['upload_branch']
+ replace_dict['post_upload_extra'] = ' '.join(c.get('post_upload_extra', []))
+
+ upload_env = self.query_env(partial_env=c.get("upload_env"),
+ replace_dict=replace_dict)
+ if 'MOZ_SIGNING_SERVERS' in os.environ:
+ upload_env['MOZ_SIGN_CMD'] = subprocess.list2cmdline(self.query_moz_sign_cmd())
+ if self.query_is_release_or_beta():
+ upload_env['MOZ_PKG_VERSION'] = '%(version)s' % replace_dict
+ self.upload_env = upload_env
+ return self.upload_env
+
+ def _query_make_ident_output(self):
+ """Get |make ident| output from the objdir.
+ Only valid after setup is run.
+ """
+ if self.make_ident_output:
+ return self.make_ident_output
+ env = self.query_repack_env()
+ dirs = self.query_abs_dirs()
+ output = self.get_output_from_command_m(["make", "ident"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ silent=True,
+ halt_on_failure=True)
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ self.make_ident_output = output
+ return output
+
+ def query_buildid(self):
+ """Get buildid from the objdir.
+ Only valid after setup is run.
+ """
+ if self.buildid:
+ return self.buildid
+ r = re.compile("buildid (\d+)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.buildid = m.groups()[0]
+ return self.buildid
+
+ def query_revision(self):
+ """Get revision from the objdir.
+ Only valid after setup is run.
+ """
+ if self.revision:
+ return self.revision
+ r = re.compile(r"gecko_revision ([0-9a-f]+\+?)")
+ output = self._query_make_ident_output()
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.revision = m.groups()[0]
+ return self.revision
+
+ def _query_make_variable(self, variable, make_args=None):
+ make = self.query_exe('make')
+ env = self.query_repack_env()
+ dirs = self.query_abs_dirs()
+ if make_args is None:
+ make_args = []
+ # TODO error checking
+ output = self.get_output_from_command_m(
+ [make, "echo-variable-%s" % variable] + make_args,
+ cwd=dirs['abs_locales_dir'], silent=True,
+ env=env
+ )
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ return output.strip()
+
+ def query_base_package_name(self):
+ """Get the package name from the objdir.
+ Only valid after setup is run.
+ """
+ if self.base_package_name:
+ return self.base_package_name
+ self.base_package_name = self._query_make_variable(
+ "PACKAGE",
+ make_args=['AB_CD=%(locale)s']
+ )
+ return self.base_package_name
+
+ def query_version(self):
+ """Get the package name from the objdir.
+ Only valid after setup is run.
+ """
+ if self.version:
+ return self.version
+ c = self.config
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ self.version = rc['version']
+ else:
+ self.version = self._query_make_variable("MOZ_APP_VERSION")
+ return self.version
+
+ def query_upload_url(self, locale):
+ if locale in self.upload_urls:
+ return self.upload_urls[locale]
+ else:
+ self.error("Can't determine the upload url for %s!" % locale)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(MobileSingleLocale, self).query_abs_dirs()
+
+ dirs = {
+ 'abs_tools_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'tools'),
+ 'build_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'build'),
+ }
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+ return self.abs_dirs
+
+ def add_failure(self, locale, message, **kwargs):
+ self.locales_property[locale] = "Failed"
+ prop_key = "%s_failure" % locale
+ prop_value = self.query_buildbot_property(prop_key)
+ if prop_value:
+ prop_value = "%s %s" % (prop_value, message)
+ else:
+ prop_value = message
+ self.set_buildbot_property(prop_key, prop_value, write_to_file=True)
+ MercurialScript.add_failure(self, locale, message=message, **kwargs)
+
+ def summary(self):
+ MercurialScript.summary(self)
+ # TODO we probably want to make this configurable on/off
+ locales = self.query_locales()
+ for locale in locales:
+ self.locales_property.setdefault(locale, "Success")
+ self.set_buildbot_property("locales", json.dumps(self.locales_property), write_to_file=True)
+
+ # Actions {{{2
+ def clobber(self):
+ self.read_buildbot_config()
+ dirs = self.query_abs_dirs()
+ c = self.config
+ objdir = os.path.join(dirs['abs_work_dir'], c['mozilla_dir'],
+ c['objdir'])
+ super(MobileSingleLocale, self).clobber(always_clobber_dirs=[objdir])
+
+ def pull(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ replace_dict = {}
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ # deepcopy() needed because of self.config lock bug :(
+ for repo_dict in deepcopy(c['repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c['repos']
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=c.get('tag_override'))
+
+ def clone_locales(self):
+ self.pull_locale_source()
+
+ # list_locales() is defined in LocalesMixin.
+
+ def _setup_configure(self, buildid=None):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ env = self.query_repack_env()
+ make = self.query_exe("make")
+ if self.run_command_m([make, "-f", "client.mk", "configure"],
+ cwd=dirs['abs_mozilla_dir'],
+ env=env,
+ error_list=MakefileErrorList):
+ self.fatal("Configure failed!")
+
+ # Run 'make export' in objdir/config to get nsinstall
+ self.run_command_m([make, 'export'],
+ cwd=os.path.join(dirs['abs_objdir'], 'config'),
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ # Run 'make buildid.h' in objdir/ to get the buildid.h file
+ cmd = [make, 'buildid.h']
+ if buildid:
+ cmd.append('MOZ_BUILD_DATE=%s' % str(buildid))
+ self.run_command_m(cmd,
+ cwd=dirs['abs_objdir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ def setup(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ mozconfig_path = os.path.join(dirs['abs_mozilla_dir'], '.mozconfig')
+ self.copyfile(os.path.join(dirs['abs_work_dir'], c['mozconfig']),
+ mozconfig_path)
+ # TODO stop using cat
+ cat = self.query_exe("cat")
+ make = self.query_exe("make")
+ self.run_command_m([cat, mozconfig_path])
+ env = self.query_repack_env()
+ if self.config.get("tooltool_config"):
+ self.tooltool_fetch(
+ self.config['tooltool_config']['manifest'],
+ output_dir=self.config['tooltool_config']['output_dir'] % self.query_abs_dirs(),
+ )
+ self._setup_configure()
+ self.run_command_m([make, "wget-en-US"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+ self.run_command_m([make, "unpack"],
+ cwd=dirs['abs_locales_dir'],
+ env=env,
+ error_list=MakefileErrorList,
+ halt_on_failure=True)
+
+ # on try we want the source we already have, otherwise update to the
+ # same as the en-US binary
+ if self.config.get("update_gecko_source_to_enUS", True):
+ revision = self.query_revision()
+ if not revision:
+ self.fatal("Can't determine revision!")
+ hg = self.query_exe("hg")
+ # TODO do this through VCSMixin instead of hardcoding hg
+ self.run_command_m([hg, "update", "-r", revision],
+ cwd=dirs["abs_mozilla_dir"],
+ env=env,
+ error_list=BaseErrorList,
+ halt_on_failure=True)
+ self.set_buildbot_property('revision', revision, write_to_file=True)
+ # Configure again since the hg update may have invalidated it.
+ buildid = self.query_buildid()
+ self._setup_configure(buildid=buildid)
+
+ def repack(self):
+ # TODO per-locale logs and reporting.
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ repack_env = self.query_repack_env()
+ success_count = total_count = 0
+ for locale in locales:
+ total_count += 1
+ self.enable_mock()
+ result = self.run_compare_locales(locale)
+ self.disable_mock()
+ if result:
+ self.add_failure(locale, message="%s failed in compare-locales!" % locale)
+ continue
+ if self.run_command_m([make, "installers-%s" % locale],
+ cwd=dirs['abs_locales_dir'],
+ env=repack_env,
+ error_list=MakefileErrorList,
+ halt_on_failure=False):
+ self.add_failure(locale, message="%s failed in make installers-%s!" % (locale, locale))
+ continue
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Repacked %d of %d binaries successfully.")
+
+ def validate_repacks_signed(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ base_package_name = self.query_base_package_name()
+ base_package_dir = os.path.join(dirs['abs_objdir'], 'dist')
+ repack_env = self.query_repack_env()
+ success_count = total_count = 0
+ for locale in locales:
+ total_count += 1
+ signed_path = os.path.join(base_package_dir,
+ base_package_name % {'locale': locale})
+ # We need to wrap what this function does with mock, since
+ # MobileSigningMixin doesn't know about mock
+ self.enable_mock()
+ status = self.verify_android_signature(
+ signed_path,
+ script=c['signature_verification_script'],
+ env=repack_env,
+ key_alias=c['key_alias'],
+ )
+ self.disable_mock()
+ if status:
+ self.add_failure(locale, message="Errors verifying %s binary!" % locale)
+ # No need to rm because upload is per-locale
+ continue
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Validated signatures on %d of %d binaries successfully.")
+
+ def taskcluster_upload(self):
+ auth = os.path.join(os.getcwd(), self.config['taskcluster_credentials_file'])
+ credentials = {}
+ execfile(auth, credentials)
+ client_id = credentials.get('taskcluster_clientId')
+ access_token = credentials.get('taskcluster_accessToken')
+ if not client_id or not access_token:
+ self.warning('Skipping S3 file upload: No taskcluster credentials.')
+ return
+
+ self.activate_virtualenv()
+
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ upload_env = self.query_upload_env()
+ cwd = dirs['abs_locales_dir']
+ branch = self.config['branch']
+ revision = self.query_revision()
+ repo = self.query_l10n_repo()
+ pushinfo = self.vcs_query_pushinfo(repo, revision, vcs='hg')
+ pushdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(pushinfo.pushdate))
+ routes_json = os.path.join(self.query_abs_dirs()['abs_mozilla_dir'],
+ 'testing/mozharness/configs/routes.json')
+ with open(routes_json) as routes_file:
+ contents = json.load(routes_file)
+ templates = contents['l10n']
+
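+        # routes.json supplies index route templates under the 'l10n' key; each
+        # template is filled in with the fmt dict below, producing one set of
+        # TaskCluster index routes per locale.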
+ for locale in locales:
+ output = self.get_output_from_command_m(
+ "%s echo-variable-UPLOAD_FILES AB_CD=%s" % (make, locale),
+ cwd=cwd,
+ env=upload_env,
+ )
+ files = shlex.split(output)
+ abs_files = [os.path.abspath(os.path.join(cwd, f)) for f in files]
+
+ routes = []
+ fmt = {
+ 'index': self.config.get('taskcluster_index', 'index.garbage.staging'),
+ 'project': branch,
+ 'head_rev': revision,
+ 'pushdate': pushdate,
+ 'year': pushdate[0:4],
+ 'month': pushdate[4:6],
+ 'day': pushdate[6:8],
+ 'build_product': self.config['stage_product'],
+ 'build_name': self.query_build_name(),
+ 'build_type': self.query_build_type(),
+ 'locale': locale,
+ }
+ for template in templates:
+ routes.append(template.format(**fmt))
+
+ self.info('Using routes: %s' % routes)
+ tc = Taskcluster(branch,
+ pushinfo.pushdate, # Use pushdate as the rank
+ client_id,
+ access_token,
+ self.log_obj,
+ )
+ task = tc.create_task(routes)
+ tc.claim_task(task)
+
+ for upload_file in abs_files:
+ tc.create_artifact(task, upload_file)
+ tc.report_completed(task)
+
+ def upload_repacks(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ make = self.query_exe("make")
+ base_package_name = self.query_base_package_name()
+ version = self.query_version()
+ upload_env = self.query_upload_env()
+ success_count = total_count = 0
+ buildnum = None
+ if c.get('release_config_file'):
+ rc = self.query_release_config()
+ buildnum = rc['buildnum']
+ for locale in locales:
+ if self.query_failure(locale):
+ self.warning("Skipping previously failed locale %s." % locale)
+ continue
+ total_count += 1
+ if c.get('base_post_upload_cmd'):
+ upload_env['POST_UPLOAD_CMD'] = c['base_post_upload_cmd'] % {'version': version, 'locale': locale, 'buildnum': str(buildnum), 'post_upload_extra': ' '.join(c.get('post_upload_extra', []))}
+ output = self.get_output_from_command_m(
+ # Ugly hack to avoid |make upload| stderr from showing up
+ # as get_output_from_command errors
+ "%s upload AB_CD=%s 2>&1" % (make, locale),
+ cwd=dirs['abs_locales_dir'],
+ env=upload_env,
+ silent=True
+ )
+ parser = OutputParser(config=self.config, log_obj=self.log_obj,
+ error_list=MakefileErrorList)
+ parser.add_lines(output)
+ if parser.num_errors:
+ self.add_failure(locale, message="%s failed in make upload!" % (locale))
+ continue
+ package_name = base_package_name % {'locale': locale}
+ r = re.compile("(http.*%s)" % package_name)
+ for line in output.splitlines():
+ m = r.match(line)
+ if m:
+ self.upload_urls[locale] = m.groups()[0]
+ self.info("Found upload url %s" % self.upload_urls[locale])
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Make Upload for %d of %d locales successful.")
+
+ def checkout_tools(self):
+ dirs = self.query_abs_dirs()
+
+ # We need hg.m.o/build/tools checked out
+ self.info("Checking out tools")
+ repos = [{
+ 'repo': self.config['tools_repo'],
+ 'vcs': "hg",
+ 'branch': "default",
+ 'dest': dirs['abs_tools_dir'],
+ }]
+ rev = self.vcs_checkout(**repos[0])
+ self.set_buildbot_property("tools_revision", rev, write_to_file=True)
+
+    def query_apkfile_path(self, locale):
+
+ dirs = self.query_abs_dirs()
+ apkdir = os.path.join(dirs['abs_objdir'], 'dist')
+ r = r"(\.)" + re.escape(locale) + r"(\.*)"
+
+ apks = []
+ for f in os.listdir(apkdir):
+ if f.endswith(".apk") and re.search(r, f):
+ apks.append(f)
+ if len(apks) == 0:
+ self.fatal("Found no apks files in %s, don't know what to do:\n%s" % (apkdir, apks), exit_code=1)
+
+ return os.path.join(apkdir, apks[0])
+
+ def query_is_release_or_beta(self):
+
+ return bool(self.config.get("is_release_or_beta"))
+
+ def submit_to_balrog(self):
+
+ if not self.query_is_nightly() and not self.query_is_release_or_beta():
+ self.info("Not a nightly or release build, skipping balrog submission.")
+ return
+
+ if not self.config.get("balrog_servers"):
+ self.info("balrog_servers not set; skipping balrog submission.")
+ return
+
+ self.checkout_tools()
+
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ balrogReady = True
+ for locale in locales:
+ apk_url = self.query_upload_url(locale)
+ if not apk_url:
+ self.add_failure(locale, message="Failed to detect %s url in make upload!" % (locale))
+ balrogReady = False
+ continue
+ if not balrogReady:
+ return self.fatal(message="Not all repacks successful, abort without submitting to balrog")
+
+ for locale in locales:
+ apkfile = self.query_apkfile_path(locale)
+ apk_url = self.query_upload_url(locale)
+
+ # Set other necessary properties for Balrog submission. None need to
+ # be passed back to buildbot, so we won't write them to the properties
+            # files.
+ self.set_buildbot_property("locale", locale)
+
+ self.set_buildbot_property("appVersion", self.query_version())
+ # The Balrog submitter translates this platform into a build target
+ # via https://github.com/mozilla/build-tools/blob/master/lib/python/release/platforms.py#L23
+ self.set_buildbot_property("platform", self.buildbot_config["properties"]["platform"])
+ #TODO: Is there a better way to get this?
+
+ self.set_buildbot_property("appName", "Fennec")
+ # TODO: don't hardcode
+ self.set_buildbot_property("hashType", "sha512")
+ self.set_buildbot_property("completeMarSize", self.query_filesize(apkfile))
+ self.set_buildbot_property("completeMarHash", self.query_sha512sum(apkfile))
+ self.set_buildbot_property("completeMarUrl", apk_url)
+ self.set_buildbot_property("isOSUpdate", False)
+ self.set_buildbot_property("buildid", self.query_buildid())
+
+ if self.query_is_nightly():
+ self.submit_balrog_updates(release_type="nightly")
+ else:
+ self.submit_balrog_updates(release_type="release")
+ if not self.query_is_nightly():
+ self.submit_balrog_release_pusher(dirs)
+
+# main {{{1
+if __name__ == '__main__':
+ single_locale = MobileSingleLocale()
+ single_locale.run_and_exit()
diff --git a/testing/mozharness/scripts/mobile_partner_repack.py b/testing/mozharness/scripts/mobile_partner_repack.py
new file mode 100755
index 000000000..8d99f825a
--- /dev/null
+++ b/testing/mozharness/scripts/mobile_partner_repack.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""mobile_partner_repack.py
+
+Downloads unsigned Android installers, repacks them with partner-specific
+preferences, then signs and uploads the resulting builds.
+"""
+
+from copy import deepcopy
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import ZipErrorList
+from mozharness.base.log import FATAL
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.l10n.locales import LocalesMixin
+from mozharness.mozilla.release import ReleaseMixin
+from mozharness.mozilla.signing import MobileSigningMixin
+
+SUPPORTED_PLATFORMS = ["android"]
+
+
+# MobilePartnerRepack {{{1
+class MobilePartnerRepack(LocalesMixin, ReleaseMixin, MobileSigningMixin,
+ TransferMixin, MercurialScript):
+ config_options = [[
+ ['--locale', ],
+ {"action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to repack"
+ }
+ ], [
+ ['--partner', ],
+ {"action": "extend",
+ "dest": "partners",
+ "type": "string",
+ "help": "Specify the partner(s) to repack"
+ }
+ ], [
+ ['--locales-file', ],
+ {"action": "store",
+ "dest": "locales_file",
+ "type": "string",
+ "help": "Specify a json file to determine which locales to repack"
+ }
+ ], [
+ ['--tag-override', ],
+ {"action": "store",
+ "dest": "tag_override",
+ "type": "string",
+ "help": "Override the tags set for all repos"
+ }
+ ], [
+ ['--platform', ],
+ {"action": "extend",
+ "dest": "platforms",
+ "type": "choice",
+ "choices": SUPPORTED_PLATFORMS,
+ "help": "Specify the platform(s) to repack"
+ }
+ ], [
+ ['--user-repo-override', ],
+ {"action": "store",
+ "dest": "user_repo_override",
+ "type": "string",
+ "help": "Override the user repo path for all repos"
+ }
+ ], [
+ ['--release-config-file', ],
+ {"action": "store",
+ "dest": "release_config_file",
+ "type": "string",
+ "help": "Specify the release config file to use"
+ }
+ ], [
+ ['--version', ],
+ {"action": "store",
+ "dest": "version",
+ "type": "string",
+ "help": "Specify the current version"
+ }
+ ], [
+ ['--buildnum', ],
+ {"action": "store",
+ "dest": "buildnum",
+ "type": "int",
+ "default": 1,
+ "metavar": "INT",
+ "help": "Specify the current release build num (e.g. build1, build2)"
+ }
+ ]]
+
+ def __init__(self, require_config_file=True):
+ self.release_config = {}
+ LocalesMixin.__init__(self)
+ MercurialScript.__init__(
+ self,
+ config_options=self.config_options,
+ all_actions=[
+ "passphrase",
+ "clobber",
+ "pull",
+ "download",
+ "repack",
+ "upload-unsigned-bits",
+ "sign",
+ "upload-signed-bits",
+ "summary",
+ ],
+ require_config_file=require_config_file
+ )
+
+ # Helper methods {{{2
+ def add_failure(self, platform, locale, **kwargs):
+ s = "%s:%s" % (platform, locale)
+ if 'message' in kwargs:
+ kwargs['message'] = kwargs['message'] % {'platform': platform, 'locale': locale}
+ super(MobilePartnerRepack, self).add_failure(s, **kwargs)
+
+ def query_failure(self, platform, locale):
+ s = "%s:%s" % (platform, locale)
+ return super(MobilePartnerRepack, self).query_failure(s)
+
+ # Actions {{{2
+
+ def pull(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ repos = []
+ replace_dict = {}
+ if c.get("user_repo_override"):
+ replace_dict['user_repo_override'] = c['user_repo_override']
+ # deepcopy() needed because of self.config lock bug :(
+ for repo_dict in deepcopy(c['repos']):
+ repo_dict['repo'] = repo_dict['repo'] % replace_dict
+ repos.append(repo_dict)
+ else:
+ repos = c['repos']
+ self.vcs_checkout_repos(repos, parent_dir=dirs['abs_work_dir'],
+ tag_override=c.get('tag_override'))
+
+ def download(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ replace_dict = {
+ 'buildnum': rc['buildnum'],
+ 'version': rc['version'],
+ }
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ base_installer_name = c['installer_base_names'][platform]
+ base_url = c['download_base_url'] + '/' + \
+ c['download_unsigned_base_subdir'] + '/' + \
+ base_installer_name
+ replace_dict['platform'] = platform
+ for locale in locales:
+ replace_dict['locale'] = locale
+ url = base_url % replace_dict
+ installer_name = base_installer_name % replace_dict
+ parent_dir = '%s/original/%s/%s' % (dirs['abs_work_dir'],
+ platform, locale)
+ file_path = '%s/%s' % (parent_dir, installer_name)
+ self.mkdir_p(parent_dir)
+ total_count += 1
+ if not self.download_file(url, file_path):
+ self.add_failure(platform, locale,
+ message="Unable to download %(platform)s:%(locale)s installer!")
+ else:
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Downloaded %d of %d installers successfully.")
+
+ def _repack_apk(self, partner, orig_path, repack_path):
+ """ Repack the apk with a partner update channel.
+ Returns True for success, None for failure
+ """
+ dirs = self.query_abs_dirs()
+ zip_bin = self.query_exe("zip")
+ unzip_bin = self.query_exe("unzip")
+ file_name = os.path.basename(orig_path)
+ tmp_dir = os.path.join(dirs['abs_work_dir'], 'tmp')
+ tmp_file = os.path.join(tmp_dir, file_name)
+ tmp_prefs_dir = os.path.join(tmp_dir, 'defaults', 'pref')
+ # Error checking for each step.
+ # Ignoring the mkdir_p()s since the subsequent copyfile()s will
+ # error out if unsuccessful.
+ if self.rmtree(tmp_dir):
+ return
+ self.mkdir_p(tmp_prefs_dir)
+ if self.copyfile(orig_path, tmp_file):
+ return
+ if self.write_to_file(os.path.join(tmp_prefs_dir, 'partner.js'),
+ 'pref("app.partner.%s", "%s");' % (partner, partner)
+ ) is None:
+ return
+ if self.run_command([unzip_bin, '-q', file_name, 'omni.ja'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't extract omni.ja from %s!" % file_name)
+ return
+ if self.run_command([zip_bin, '-9r', 'omni.ja',
+ 'defaults/pref/partner.js'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't add partner.js to omni.ja!")
+ return
+ if self.run_command([zip_bin, '-9r', file_name, 'omni.ja'],
+ error_list=ZipErrorList,
+ return_type='num_errors',
+ cwd=tmp_dir):
+ self.error("Can't re-add omni.ja to %s!" % file_name)
+ return
+ if self.unsign_apk(tmp_file):
+ return
+ repack_dir = os.path.dirname(repack_path)
+ self.mkdir_p(repack_dir)
+ if self.copyfile(tmp_file, repack_path):
+ return
+ return True
+
+ def repack(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ for locale in locales:
+ installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+ if self.query_failure(platform, locale):
+ self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
+ continue
+ original_path = '%s/original/%s/%s/%s' % (dirs['abs_work_dir'], platform, locale, installer_name)
+ for partner in c['partner_config'].keys():
+ repack_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+ total_count += 1
+ if self._repack_apk(partner, original_path, repack_path):
+ success_count += 1
+ else:
+ self.add_failure(platform, locale,
+ message="Unable to repack %(platform)s:%(locale)s installer!")
+ self.summarize_success_count(success_count, total_count,
+ message="Repacked %d of %d installers successfully.")
+
+ def _upload(self, dir_name="unsigned/partner-repacks"):
+ c = self.config
+ dirs = self.query_abs_dirs()
+ local_path = os.path.join(dirs['abs_work_dir'], dir_name)
+ rc = self.query_release_config()
+ replace_dict = {
+ 'buildnum': rc['buildnum'],
+ 'version': rc['version'],
+ }
+ remote_path = '%s/%s' % (c['ftp_upload_base_dir'] % replace_dict, dir_name)
+ if self.rsync_upload_directory(local_path, c['ftp_ssh_key'],
+ c['ftp_user'], c['ftp_server'],
+ remote_path):
+ self.return_code += 1
+
+ def upload_unsigned_bits(self):
+ self._upload()
+
+ # passphrase() in AndroidSigningMixin
+ # verify_passphrases() in AndroidSigningMixin
+
+ def preflight_sign(self):
+ if 'passphrase' not in self.actions:
+ self.passphrase()
+ self.verify_passphrases()
+
+ def sign(self):
+ c = self.config
+ rc = self.query_release_config()
+ dirs = self.query_abs_dirs()
+ locales = self.query_locales()
+ success_count = total_count = 0
+ for platform in c['platforms']:
+ for locale in locales:
+ installer_name = c['installer_base_names'][platform] % {'version': rc['version'], 'locale': locale}
+ if self.query_failure(platform, locale):
+ self.warning("%s:%s had previous issues; skipping!" % (platform, locale))
+ continue
+ for partner in c['partner_config'].keys():
+ unsigned_path = '%s/unsigned/partner-repacks/%s/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale, installer_name)
+ signed_dir = '%s/partner-repacks/%s/%s/%s' % (dirs['abs_work_dir'], partner, platform, locale)
+ signed_path = "%s/%s" % (signed_dir, installer_name)
+ total_count += 1
+ self.info("Signing %s %s." % (platform, locale))
+ if not os.path.exists(unsigned_path):
+ self.error("Missing apk %s!" % unsigned_path)
+ continue
+ if self.sign_apk(unsigned_path, c['keystore'],
+ self.store_passphrase, self.key_passphrase,
+ c['key_alias']) != 0:
+ self.add_summary("Unable to sign %s:%s apk!" % (platform, locale), level=FATAL)
+ else:
+ self.mkdir_p(signed_dir)
+ if self.align_apk(unsigned_path, signed_path):
+ self.add_failure(platform, locale,
+ message="Unable to align %(platform)s%(locale)s apk!")
+ self.rmtree(signed_dir)
+ else:
+ success_count += 1
+ self.summarize_success_count(success_count, total_count,
+ message="Signed %d of %d apks successfully.")
+
+ # TODO verify signatures.
+
+ def upload_signed_bits(self):
+ self._upload(dir_name="partner-repacks")
+
+
+# main {{{1
+if __name__ == '__main__':
+ mobile_partner_repack = MobilePartnerRepack()
+ mobile_partner_repack.run_and_exit()
diff --git a/testing/mozharness/scripts/multil10n.py b/testing/mozharness/scripts/multil10n.py
new file mode 100755
index 000000000..c89caf7c6
--- /dev/null
+++ b/testing/mozharness/scripts/multil10n.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""multil10n.py
+
+Thin driver around mozharness.mozilla.l10n.multi_locale_build.MultiLocaleBuild,
+which does the actual multi-locale repack work.
+"""
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.l10n.multi_locale_build import MultiLocaleBuild
+
+if __name__ == '__main__':
+ multi_locale_build = MultiLocaleBuild()
+ multi_locale_build.run_and_exit()
diff --git a/testing/mozharness/scripts/openh264_build.py b/testing/mozharness/scripts/openh264_build.py
new file mode 100644
index 000000000..072d102d5
--- /dev/null
+++ b/testing/mozharness/scripts/openh264_build.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import sys
+import os
+import glob
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+# import the guts
+from mozharness.base.vcs.vcsbase import VCSScript
+from mozharness.base.log import ERROR
+from mozharness.base.transfer import TransferMixin
+from mozharness.mozilla.mock import MockMixin
+
+
+class OpenH264Build(MockMixin, TransferMixin, VCSScript):
+ all_actions = [
+ 'clobber',
+ 'checkout-sources',
+ 'build',
+ 'test',
+ 'package',
+ 'upload',
+ ]
+
+ default_actions = [
+ 'checkout-sources',
+ 'build',
+ 'test',
+ 'package',
+ ]
+
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "OpenH264 repository to use",
+ "default": "https://github.com/cisco/openh264.git"
+ }],
+ [["--rev"], {
+ "dest": "revision",
+ "help": "revision to checkout",
+ "default": "master"
+ }],
+ [["--debug"], {
+ "dest": "debug_build",
+ "action": "store_true",
+ "help": "Do a debug build",
+ }],
+ [["--64"], {
+ "dest": "64bit",
+ "action": "store_true",
+ "help": "Do a 64-bit build",
+ "default": True,
+ }],
+ [["--32"], {
+ "dest": "64bit",
+ "action": "store_false",
+ "help": "Do a 32-bit build",
+ }],
+ [["--os"], {
+ "dest": "operating_system",
+ "help": "Specify the operating system to build for",
+ }],
+ [["--use-mock"], {
+ "dest": "use_mock",
+ "help": "use mock to set up build environment",
+ "action": "store_true",
+ "default": False,
+ }],
+ [["--use-yasm"], {
+ "dest": "use_yasm",
+ "help": "use yasm instead of nasm",
+ "action": "store_true",
+ "default": False,
+ }],
+ ]
+
+ def __init__(self, require_config_file=False, config={},
+ all_actions=all_actions,
+ default_actions=default_actions):
+
+ # Default configuration
+ default_config = {
+ 'debug_build': False,
+ 'mock_target': 'mozilla-centos6-x86_64',
+ 'mock_packages': ['make', 'git', 'nasm', 'glibc-devel.i686', 'libstdc++-devel.i686', 'zip', 'yasm'],
+ 'mock_files': [],
+ 'upload_ssh_key': os.path.expanduser("~/.ssh/ffxbld_rsa"),
+ 'upload_ssh_user': 'ffxbld',
+ 'upload_ssh_host': 'stage.mozilla.org',
+ 'upload_path_base': '/home/ffxbld/openh264',
+ 'use_yasm': False,
+ }
+ default_config.update(config)
+
+ VCSScript.__init__(
+ self,
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config=default_config,
+ all_actions=all_actions,
+ default_actions=default_actions,
+ )
+
+ if self.config['use_mock']:
+ self.setup_mock()
+ self.enable_mock()
+
+ def query_package_name(self):
+ if self.config['64bit']:
+ bits = '64'
+ else:
+ bits = '32'
+
+ version = self.config['revision']
+
+ if sys.platform == 'linux2':
+ if self.config.get('operating_system') == 'android':
+ return 'openh264-android-{version}.zip'.format(version=version, bits=bits)
+ else:
+ return 'openh264-linux{bits}-{version}.zip'.format(version=version, bits=bits)
+ elif sys.platform == 'darwin':
+ return 'openh264-macosx{bits}-{version}.zip'.format(version=version, bits=bits)
+ elif sys.platform == 'win32':
+ return 'openh264-win{bits}-{version}.zip'.format(version=version, bits=bits)
+ self.fatal("can't determine platform")
+
+ def query_make_params(self):
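+ # Illustrative result for a 64-bit debug Android build with the flags above:
+ # ['BUILDTYPE=Debug', 'ENABLE64BIT=Yes', 'OS=android']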
+ retval = []
+ if self.config['debug_build']:
+ retval.append('BUILDTYPE=Debug')
+
+ if self.config['64bit']:
+ retval.append('ENABLE64BIT=Yes')
+ else:
+ retval.append('ENABLE64BIT=No')
+
+ if "operating_system" in self.config:
+ retval.append("OS=%s" % self.config['operating_system'])
+
+ if self.config['use_yasm']:
+ retval.append('ASM=yasm')
+
+ return retval
+
+ def query_upload_ssh_key(self):
+ return self.config['upload_ssh_key']
+
+ def query_upload_ssh_host(self):
+ return self.config['upload_ssh_host']
+
+ def query_upload_ssh_user(self):
+ return self.config['upload_ssh_user']
+
+ def query_upload_ssh_path(self):
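+ # e.g. "/home/ffxbld/openh264/master" with the default upload path and revision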
+ return "%s/%s" % (self.config['upload_path_base'], self.config['revision'])
+
+ def run_make(self, target):
+ cmd = ['make', target] + self.query_make_params()
+ dirs = self.query_abs_dirs()
+ repo_dir = os.path.join(dirs['abs_work_dir'], 'src')
+ return self.run_command(cmd, cwd=repo_dir)
+
+ def checkout_sources(self):
+ repo = self.config['repo']
+ rev = self.config['revision']
+
+ dirs = self.query_abs_dirs()
+ repo_dir = os.path.join(dirs['abs_work_dir'], 'src')
+
+ repos = [
+ {'vcs': 'gittool', 'repo': repo, 'dest': repo_dir, 'revision': rev},
+ ]
+
+ # self.vcs_checkout already retries, so no need to wrap it in
+ # self.retry. We set the error_level to ERROR to prevent it going fatal
+ # so we can do our own handling here.
+ retval = self.vcs_checkout_repos(repos, error_level=ERROR)
+ if not retval:
+ self.rmtree(repo_dir)
+ self.fatal("Automation Error: couldn't clone repo", exit_code=4)
+
+ # Checkout gmp-api
+ # TODO: Nothing here updates it yet, or enforces versions!
+ if not os.path.exists(os.path.join(repo_dir, 'gmp-api')):
+ retval = self.run_make('gmp-bootstrap')
+ if retval != 0:
+ self.fatal("couldn't bootstrap gmp")
+ else:
+ self.info("skipping gmp bootstrap - we have it locally")
+
+ # Checkout gtest
+ # TODO: Requires svn!
+ if not os.path.exists(os.path.join(repo_dir, 'gtest')):
+ retval = self.run_make('gtest-bootstrap')
+ if retval != 0:
+ self.fatal("couldn't bootstrap gtest")
+ else:
+ self.info("skipping gtest bootstrap - we have it locally")
+
+ return retval
+
+ def build(self):
+ retval = self.run_make('plugin')
+ if retval != 0:
+ self.fatal("couldn't build plugin")
+
+ def package(self):
+ dirs = self.query_abs_dirs()
+ srcdir = os.path.join(dirs['abs_work_dir'], 'src')
+ package_name = self.query_package_name()
+ package_file = os.path.join(dirs['abs_work_dir'], package_name)
+ if os.path.exists(package_file):
+ os.unlink(package_file)
+ to_package = [os.path.basename(f) for f in glob.glob(os.path.join(srcdir, "*gmpopenh264*"))]
+ cmd = ['zip', package_file] + to_package
+ retval = self.run_command(cmd, cwd=srcdir)
+ if retval != 0:
+ self.fatal("couldn't make package")
+ self.copy_to_upload_dir(package_file)
+
+ def upload(self):
+ if self.config['use_mock']:
+ self.disable_mock()
+ dirs = self.query_abs_dirs()
+ self.rsync_upload_directory(
+ dirs['abs_upload_dir'],
+ self.query_upload_ssh_key(),
+ self.query_upload_ssh_user(),
+ self.query_upload_ssh_host(),
+ self.query_upload_ssh_path(),
+ )
+ if self.config['use_mock']:
+ self.enable_mock()
+
+ def test(self):
+ retval = self.run_make('test')
+ if retval != 0:
+ self.fatal("test failures")
+
+
+# main {{{1
+if __name__ == '__main__':
+ myScript = OpenH264Build()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/antivirus.py b/testing/mozharness/scripts/release/antivirus.py
new file mode 100644
index 000000000..b40dc5cc0
--- /dev/null
+++ b/testing/mozharness/scripts/release/antivirus.py
@@ -0,0 +1,193 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+import shutil
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+
+
+class AntivirusScan(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "S3 Bucket to retrieve files from",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-d", "--download-parallelization"], {
+ "dest": "download_parallelization",
+ "default": 6,
+ "type": "int",
+ "help": "Number of concurrent file downloads",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--tools-revision"], {
+ "dest": "tools_revision",
+ "help": "Revision of tools repo to use when downloading extract_and_run_command.py",
+ }],
+ ] + virtualenv_config_options
+
+ DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+ ]
+ CACHE_DIR = 'cache'
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "get-extract-script",
+ "get-files",
+ "scan-files",
+ "cleanup-cache",
+ ],
+ )
+ self.excludes = self.config.get('excludes', self.DEFAULT_EXCLUDES)
+ self.dest_dir = self.CACHE_DIR
+
+ def _get_candidates_prefix(self):
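+ # e.g. "pub/firefox/candidates/39.0b5-candidates/build2/" (illustrative
+ # product/version/build-number values)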
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.excludes:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def get_extract_script(self):
+ """Gets a copy of extract_and_run_command.py from tools, and the supporting mar.py,
+ so that we can unpack various files for clam to scan them."""
+ remote_file = "{}/raw-file/{}/stage/extract_and_run_command.py".format(self.config["tools_repo"],
+ self.config["tools_revision"])
+ self.download_file(remote_file, file_name="extract_and_run_command.py")
+
+ def get_files(self):
+ """Pull the candidate files down from S3 for scanning, using parallel requests"""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+ from httplib import HTTPException
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(anon=True)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ if os.path.exists(self.dest_dir):
+ self.info('Emptying {}'.format(self.dest_dir))
+ shutil.rmtree(self.dest_dir)
+ os.makedirs(self.dest_dir)
+
+ def worker(item):
+ source, destination = item
+
+ self.info("Downloading {} to {}".format(source, destination))
+ key = bucket.get_key(source)
+ return retry(key.get_contents_to_filename,
+ args=(destination, ),
+ sleeptime=30, max_sleeptime=150,
+ retry_exceptions=(S3CopyError, S3ResponseError,
+ IOError, HTTPException))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = os.path.join(self.dest_dir, keyname.replace(candidates_prefix, ''))
+ dest_dir = os.path.dirname(destination)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["download_parallelization"])
+ pool.map(worker, find_release_files())
+
+ def scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ self.run_command([self.query_python_path(), 'extract_and_run_command.py',
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamdscan', '-m', '--no-summary', '--', self.dest_dir])
+
+ def cleanup_cache(self):
+ """If we have simultaneous releases in flight an av slave may end up doing another
+ av job before being recycled, and we need to make sure the full disk is available."""
+ shutil.rmtree(self.dest_dir)
+
+
+if __name__ == "__main__":
+ myScript = AntivirusScan()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/beet_mover.py b/testing/mozharness/scripts/release/beet_mover.py
new file mode 100755
index 000000000..adc8b19e1
--- /dev/null
+++ b/testing/mozharness/scripts/release/beet_mover.py
@@ -0,0 +1,372 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""beet_mover.py.
+
+downloads artifacts, scans them and uploads them to s3
+"""
+import hashlib
+import sys
+import os
+import pprint
+import re
+from os import listdir
+from os.path import isfile, join
+import sh
+import redo
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.log import FATAL
+from mozharness.base.python import VirtualenvMixin
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+import mozharness
+import mimetypes
+
+
+def get_hash(content, hash_type="md5"):
+ h = hashlib.new(hash_type)
+ h.update(content)
+ return h.hexdigest()
+
+
+CONFIG_OPTIONS = [
+ [["--template"], {
+ "dest": "template",
+ "help": "Specify jinja2 template file",
+ }],
+ [['--locale', ], {
+ "action": "extend",
+ "dest": "locales",
+ "type": "string",
+ "help": "Specify the locale(s) to upload."}],
+ [["--platform"], {
+ "dest": "platform",
+ "help": "Specify the platform of the build",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "full release version based on gecko and tag/stage identifier. e.g. '44.0b1'"
+ }],
+ [["--app-version"], {
+ "dest": "app_version",
+ "help": "numbered version based on gecko. e.g. '44.0'"
+ }],
+ [["--partial-version"], {
+ "dest": "partial_version",
+ "help": "the partial version the mar is based off of"
+ }],
+ [["--artifact-subdir"], {
+ "dest": "artifact_subdir",
+ "default": 'build',
+ "help": "subdir location for taskcluster artifacts after public/ base.",
+ }],
+ [["--build-num"], {
+ "dest": "build_num",
+ "help": "the release build identifier"
+ }],
+ [["--taskid"], {
+ "dest": "taskid",
+ "help": "taskcluster task id to download artifacts from",
+ }],
+ [["--bucket"], {
+ "dest": "bucket",
+ "help": "s3 bucket to move beets to.",
+ }],
+ [["--product"], {
+ "dest": "product",
+ "help": "product for which artifacts are beetmoved",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "action": "append",
+ "help": "List of filename patterns to exclude. See script source for default",
+ }],
+ [["-s", "--scan-parallelization"], {
+ "dest": "scan_parallelization",
+ "default": 4,
+ "type": "int",
+ "help": "Number of concurrent file scans",
+ }],
+]
+
+DEFAULT_EXCLUDES = [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*\.asc$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*contrib.*"
+]
+CACHE_DIR = 'cache'
+
+MIME_MAP = {
+ '': 'text/plain',
+ '.asc': 'text/plain',
+ '.beet': 'text/plain',
+ '.bundle': 'application/octet-stream',
+ '.bz2': 'application/octet-stream',
+ '.checksums': 'text/plain',
+ '.dmg': 'application/x-iso9660-image',
+ '.mar': 'application/octet-stream',
+ '.xpi': 'application/x-xpinstall'
+}
+
+HASH_FORMATS = ["sha512", "sha256"]
+
+
+class BeetMover(BaseScript, VirtualenvMixin, object):
+ def __init__(self, aws_creds):
+ beetmover_kwargs = {
+ 'config_options': CONFIG_OPTIONS,
+ 'all_actions': [
+ # 'clobber',
+ 'create-virtualenv',
+ 'activate-virtualenv',
+ 'generate-candidates-manifest',
+ 'refresh-antivirus',
+ 'verify-bits', # beets
+ 'download-bits', # beets
+ 'scan-bits', # beets
+ 'upload-bits', # beets
+ ],
+ 'require_config_file': False,
+ # Default configuration
+ 'config': {
+ # base index url where to find taskcluster artifact based on taskid
+ "artifact_base_url": 'https://queue.taskcluster.net/v1/task/{taskid}/artifacts/public/{subdir}',
+ "virtualenv_modules": [
+ "boto",
+ "PyYAML",
+ "Jinja2",
+ "redo",
+ "cryptography==2.0.3",
+ "mar",
+ ],
+ "virtualenv_path": "venv",
+ },
+ }
+ # TODO: do excludes need to be configured via command line for specific builds?
+ super(BeetMover, self).__init__(**beetmover_kwargs)
+
+ c = self.config
+ self.manifest = {}
+ # assigned in _post_create_virtualenv
+ self.virtualenv_imports = None
+ self.bucket = c['bucket']
+ if not all(aws_creds):
+ self.fatal('credentials must be passed in env: "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY"')
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ # if excludes is set from command line, use it otherwise use defaults
+ self.excludes = self.config.get('excludes', DEFAULT_EXCLUDES)
+ dirs = self.query_abs_dirs()
+ self.dest_dir = os.path.join(dirs['abs_work_dir'], CACHE_DIR)
+ self.mime_fix()
+
+ def activate_virtualenv(self):
+ """
+ activates the virtualenv and adds its module imports to an instance-wide namespace.
+
+ Creating and activating a virtualenv onto the currently executing python interpreter is a
+ bit of black magic. Rather than scattering import statements in various places within the
+ script, we import them here immediately after we activate the newly created virtualenv.
+ """
+ VirtualenvMixin.activate_virtualenv(self)
+
+ import boto
+ import yaml
+ import jinja2
+ self.virtualenv_imports = {
+ 'boto': boto,
+ 'yaml': yaml,
+ 'jinja2': jinja2,
+ }
+ self.log("activated virtualenv with the modules: {}".format(str(self.virtualenv_imports)))
+
+ def _get_template_vars(self):
+ return {
+ "platform": self.config['platform'],
+ "locales": self.config.get('locales'),
+ "version": self.config['version'],
+ "app_version": self.config.get('app_version', ''),
+ "partial_version": self.config.get('partial_version', ''),
+ "build_num": self.config['build_num'],
+ # keep the trailing slash
+ "s3_prefix": 'pub/{prod}/candidates/{ver}-candidates/{n}/'.format(
+ prod=self.config['product'], ver=self.config['version'],
+ n=self.config['build_num']
+ ),
+ "artifact_base_url": self.config['artifact_base_url'].format(
+ taskid=self.config['taskid'], subdir=self.config['artifact_subdir']
+ )
+ }
+
+ def generate_candidates_manifest(self):
+ """
+ generates and outputs a manifest that maps expected Taskcluster artifact names
+ to release deliverable names
+ """
+ self.log('generating manifest from {}...'.format(self.config['template']))
+ template_dir, template_file = os.path.split(os.path.abspath(self.config['template']))
+ jinja2 = self.virtualenv_imports['jinja2']
+ yaml = self.virtualenv_imports['yaml']
+
+ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
+ undefined=jinja2.StrictUndefined)
+ template = jinja_env.get_template(template_file)
+ self.manifest = yaml.safe_load(template.render(**self._get_template_vars()))
+
+ self.log("manifest generated:")
+ self.log(pprint.pformat(self.manifest['mapping']))
+
+ def verify_bits(self):
+ """
+ inspects each artifact and verifies that they were created by trustworthy tasks
+ """
+ # TODO
+ self.log('skipping verification. unimplemented...')
+
+ def refresh_antivirus(self):
+ self.info("Refreshing clamav db...")
+ try:
+ redo.retry(lambda:
+ sh.freshclam("--stdout", "--verbose", _timeout=300,
+ _err_to_out=True))
+ self.info("Done.")
+ except sh.ErrorReturnCode:
+ self.warning("Freshclam failed, skipping DB update")
+
+ def download_bits(self):
+ """
+ downloads the artifacts listed in the manifest into the work dir
+ """
+ self.log('downloading artifacts listed in the manifest...')
+ dirs = self.query_abs_dirs()
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("downloading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ self.retry(
+ self.download_file,
+ args=[source],
+ kwargs={'parent_dir': dirs['abs_work_dir']},
+ error_level=FATAL)
+ self.log('Success!')
+
+ def _strip_prefix(self, s3_key):
+ """Return file name relative to prefix"""
+ # "abc/def/hfg".split("abc/de")[-1] == "f/hfg"
+ return s3_key.split(self._get_template_vars()["s3_prefix"])[-1]
+
+ def upload_bits(self):
+ """
+ uploads list of artifacts to s3 candidates dir based on a given manifest
+ """
+ self.log('uploading artifacts to s3...')
+ dirs = self.query_abs_dirs()
+
+ # connect to s3
+ boto = self.virtualenv_imports['boto']
+ conn = boto.connect_s3(self.aws_key_id, self.aws_secret_key)
+ bucket = conn.get_bucket(self.bucket)
+
+ for locale in self.manifest['mapping']:
+ for deliverable in self.manifest['mapping'][locale]:
+ self.log("uploading '{}' deliverable for '{}' locale".format(deliverable, locale))
+ # we have already downloaded the files locally so we can use that version
+ source = self.manifest['mapping'][locale][deliverable]['artifact']
+ s3_key = self.manifest['mapping'][locale][deliverable]['s3_key']
+ downloaded_file = os.path.join(dirs['abs_work_dir'], self.get_filename_from_url(source))
+ # generate checksums for every uploaded file
+ beet_file_name = '{}.beet'.format(downloaded_file)
+ # upload checksums to a separate subdirectory
+ beet_dest = '{prefix}beetmover-checksums/{f}.beet'.format(
+ prefix=self._get_template_vars()["s3_prefix"],
+ f=self._strip_prefix(s3_key)
+ )
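+ # each line of the .beet file is "<hexdigest> <hash-format> <size-in-bytes> <file-name>"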
+ beet_contents = '\n'.join([
+ '{hash} {fmt} {size} {name}'.format(
+ hash=self.get_hash_for_file(downloaded_file, hash_type=fmt),
+ fmt=fmt,
+ size=os.path.getsize(downloaded_file),
+ name=self._strip_prefix(s3_key)) for fmt in HASH_FORMATS
+ ])
+ self.write_to_file(beet_file_name, beet_contents)
+ self.upload_bit(source=downloaded_file, s3_key=s3_key,
+ bucket=bucket)
+ self.upload_bit(source=beet_file_name, s3_key=beet_dest,
+ bucket=bucket)
+ self.log('Success!')
+
+
+ def upload_bit(self, source, s3_key, bucket):
+ boto = self.virtualenv_imports['boto']
+ self.info('uploading to s3 with key: {}'.format(s3_key))
+ key = boto.s3.key.Key(bucket) # create new key
+ key.key = s3_key # set key name
+
+ self.info("Checking if `{}` already exists".format(s3_key))
+ key = bucket.get_key(s3_key)
+ if not key:
+ self.info("Uploading to `{}`".format(s3_key))
+ key = bucket.new_key(s3_key)
+ # set key value
+ mime_type, _ = mimetypes.guess_type(source)
+ self.retry(lambda: key.set_contents_from_filename(source, headers={'Content-Type': mime_type}),
+ error_level=FATAL)
+ else:
+ if not get_hash(key.get_contents_as_string()) == get_hash(open(source).read()):
+ # for now, let's halt. If necessary, we can revisit this and allow for overwrites
+ # to the same buildnum release with different bits
+ self.fatal("`{}` already exists with different checksum.".format(s3_key))
+ self.log("`{}` has the same MD5 checksum, not uploading".format(s3_key))
+
+ def scan_bits(self):
+
+ dirs = self.query_abs_dirs()
+
+ filenames = [f for f in listdir(dirs['abs_work_dir']) if isfile(join(dirs['abs_work_dir'], f))]
+ self.mkdir_p(self.dest_dir)
+ for file_name in filenames:
+ if self._matches_exclude(file_name):
+ self.info("Excluding {} from virus scan".format(file_name))
+ else:
+ self.info('Copying {} to {}'.format(file_name, self.dest_dir))
+ self.copyfile(os.path.join(dirs['abs_work_dir'], file_name), os.path.join(self.dest_dir, file_name))
+ self._scan_files()
+ self.info('Emptying {}'.format(self.dest_dir))
+ self.rmtree(self.dest_dir)
+
+ def _scan_files(self):
+ """Scan the files we've collected. We do the download and scan concurrently to make
+ it easier to have a coherent log afterwards. Uses the venv python."""
+ external_tools_path = os.path.join(
+ os.path.abspath(os.path.dirname(os.path.dirname(mozharness.__file__))), 'external_tools')
+ self.run_command([self.query_python_path(), os.path.join(external_tools_path, 'extract_and_run_command.py'),
+ '-j{}'.format(self.config['scan_parallelization']),
+ 'clamscan', '--no-summary', '--', self.dest_dir])
+
+ def _matches_exclude(self, keyname):
+ return any(re.search(exclude, keyname) for exclude in self.excludes)
+
+ def mime_fix(self):
+ """ Add mimetypes for custom extensions """
+ mimetypes.init()
+ map(lambda (ext, mime_type,): mimetypes.add_type(mime_type, ext), MIME_MAP.items())
+
+if __name__ == '__main__':
+ beet_mover = BeetMover(pop_aws_auth_from_env())
+ beet_mover.run_and_exit()
diff --git a/testing/mozharness/scripts/release/generate-checksums.py b/testing/mozharness/scripts/release/generate-checksums.py
new file mode 100644
index 000000000..61a1c43d2
--- /dev/null
+++ b/testing/mozharness/scripts/release/generate-checksums.py
@@ -0,0 +1,284 @@
+from multiprocessing.pool import ThreadPool
+import os
+from os import path
+import re
+import sys
+import posixpath
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.checksums import parse_checksums_file
+from mozharness.mozilla.signing import SigningMixin
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+class ChecksumsGenerator(BaseScript, VirtualenvMixin, SigningMixin, VCSMixin, BuildbotMixin):
+ config_options = [
+ [["--stage-product"], {
+ "dest": "stage_product",
+ "help": "Name of product used in file server's directory structure, eg: firefox, mobile",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name-prefix"], {
+ "dest": "bucket_name_prefix",
+ "help": "Prefix of bucket name, eg: net-mozaws-prod-delivery. This will be used to generate a full bucket name (such as net-mozaws-prod-delivery-{firefox,archive}.",
+ }],
+ [["--bucket-name-full"], {
+ "dest": "bucket_name_full",
+ "help": "Full bucket name, eg: net-mozaws-prod-delivery-firefox",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": int,
+ "help": "Number of checksums file to download concurrently",
+ }],
+ [["-f", "--format"], {
+ "dest": "formats",
+ "default": [],
+ "action": "append",
+ "help": "Format(s) to generate big checksums file for. Default: sha512",
+ }],
+ [["--include"], {
+ "dest": "includes",
+ "default": [],
+ "action": "append",
+ "help": "List of patterns to include in big checksums file. See script source for default.",
+ }],
+ [["--tools-repo"], {
+ "dest": "tools_repo",
+ "default": "https://hg.mozilla.org/build/tools",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key for S3",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "pip==1.5.5",
+ "boto",
+ ],
+ "virtualenv_path": "venv",
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ "copy-info-files",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "collect-individual-checksums",
+ "create-big-checksums",
+ "sign",
+ "upload",
+ ],
+ )
+
+ self.checksums = {}
+ self.bucket = None
+ self.bucket_name = self._get_bucket_name()
+ self.file_prefix = self._get_file_prefix()
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _pre_config_lock(self, rw_config):
+ super(ChecksumsGenerator, self)._pre_config_lock(rw_config)
+
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['version', 'build_number']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ # These defaults are set here rather than in the config because default
+ # lists cannot be completely overridden, only appended to.
+ if not self.config.get("formats"):
+ self.config["formats"] = ["sha512", "sha256"]
+
+ if not self.config.get("includes"):
+ self.config["includes"] = [
+ r"^.*\.tar\.bz2$",
+ r"^.*\.tar\.xz$",
+ r"^.*\.dmg$",
+ r"^.*\.bundle$",
+ r"^.*\.mar$",
+ r"^.*Setup.*\.exe$",
+ r"^.*\.xpi$",
+ r"^.*fennec.*\.apk$",
+ ]
+
+ def _get_bucket_name(self):
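+ # e.g. "net-mozaws-prod-delivery-firefox" for Firefox, otherwise
+ # "<bucket-name-prefix>-archive", unless --bucket-name-full overrides it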
+ if self.config.get('bucket_name_full'):
+ return self.config['bucket_name_full']
+
+ suffix = "archive"
+ # Firefox has a special bucket, per https://github.com/mozilla-services/product-delivery-tools/blob/master/bucketmap.go
+ if self.config["stage_product"] == "firefox":
+ suffix = "firefox"
+
+ return "{}-{}".format(self.config["bucket_name_prefix"], suffix)
+
+ def _get_file_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config["stage_product"], self.config["version"], self.config["build_number"]
+ )
+
+ def _get_sums_filename(self, format_):
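+ # e.g. "SHA512SUMS" for the "sha512" format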
+ return "{}SUMS".format(format_.upper())
+
+ def _get_bucket(self):
+ if not self.bucket:
+ self.activate_virtualenv()
+ from boto.s3.connection import S3Connection
+
+ self.info("Connecting to S3")
+ conn = S3Connection()
+ self.debug("Successfully connected to S3")
+ self.info("Connecting to bucket {}".format(self.bucket_name))
+ self.bucket = conn.get_bucket(self.bucket_name)
+ return self.bucket
+
+ def collect_individual_checksums(self):
+ """This step grabs all of the small checksums files for the release,
+ filters out any unwanted files from within them, and adds the remainder
+ to self.checksums for subsequent steps to use."""
+ bucket = self._get_bucket()
+ self.info("File prefix is: {}".format(self.file_prefix))
+
+ # Temporary holding place for checksums
+ raw_checksums = []
+ def worker(item):
+ self.debug("Downloading {}".format(item))
+ # TODO: It would be nice to download the associated .asc file
+ # and verify against it.
+ sums = bucket.get_key(item).get_contents_as_string()
+ raw_checksums.append(sums)
+
+ def find_checksums_files():
+ self.info("Getting key names from bucket")
+ checksum_files = {"beets": [], "checksums": []}
+ for key in bucket.list(prefix=self.file_prefix):
+ if key.key.endswith(".checksums"):
+ self.debug("Found checksums file: {}".format(key.key))
+ checksum_files["checksums"].append(key.key)
+ elif key.key.endswith(".beet"):
+ self.debug("Found beet file: {}".format(key.key))
+ checksum_files["beets"].append(key.key)
+ else:
+ self.debug("Ignoring non-checksums file: {}".format(key.key))
+ if checksum_files["beets"]:
+ self.log("Using beet format")
+ return checksum_files["beets"]
+ else:
+ self.log("Using checksums format")
+ return checksum_files["checksums"]
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_checksums_files())
+
+ for c in raw_checksums:
+ for f, info in parse_checksums_file(c).iteritems():
+ for pattern in self.config["includes"]:
+ if re.search(pattern, f):
+ if f in self.checksums:
+ self.fatal("Found duplicate checksum entry for {}, don't know which one to pick.".format(f))
+ if not set(self.config["formats"]) <= set(info["hashes"]):
+ self.fatal("Missing necessary format for file {}".format(f))
+ self.debug("Adding checksums for file: {}".format(f))
+ self.checksums[f] = info
+ break
+ else:
+ self.debug("Ignoring checksums for file: {}".format(f))
+
+ def create_big_checksums(self):
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Creating big checksums file: {}".format(sums))
+ with open(sums, "w+") as output_file:
+ for fn in sorted(self.checksums):
+ output_file.write("{} {}\n".format(self.checksums[fn]["hashes"][fmt], fn))
+
+ def sign(self):
+ dirs = self.query_abs_dirs()
+
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.vcs_checkout(
+ repo=self.config["tools_repo"],
+ branch="default",
+ vcs="hg",
+ dest=tools_dir,
+ )
+
+ sign_cmd = self.query_moz_sign_cmd(formats=["gpg"])
+
+ for fmt in self.config["formats"]:
+ sums = self._get_sums_filename(fmt)
+ self.info("Signing big checksums file: {}".format(sums))
+ retval = self.run_command(sign_cmd + [sums])
+ if retval != 0:
+ self.fatal("Failed to sign {}".format(sums))
+
+ def upload(self):
+ # we need to provide the public side of the gpg key so that people can
+ # verify the detached signatures
+ dirs = self.query_abs_dirs()
+ tools_dir = path.join(dirs["abs_work_dir"], "tools")
+ self.copyfile(os.path.join(tools_dir, 'scripts', 'release', 'KEY'),
+ 'KEY')
+ files = ['KEY']
+
+ for fmt in self.config["formats"]:
+ files.append(self._get_sums_filename(fmt))
+ files.append("{}.asc".format(self._get_sums_filename(fmt)))
+
+ bucket = self._get_bucket()
+ for f in files:
+ dest = posixpath.join(self.file_prefix, f)
+ self.info("Uploading {} to {}".format(f, dest))
+ key = bucket.new_key(dest)
+ key.set_contents_from_filename(f, headers={'Content-Type': 'text/plain'})
+
+ def copy_info_files(self):
+ bucket = self._get_bucket()
+
+ for key in bucket.list(prefix=self.file_prefix):
+ if re.search(r'/en-US/android.*_info\.txt$', key.name):
+ self.info("Found {}".format(key.name))
+ dest = posixpath.join(self.file_prefix, posixpath.basename(key.name))
+ self.info("Copying to {}".format(dest))
+ bucket.copy_key(new_key_name=dest,
+ src_bucket_name=self.bucket_name,
+ src_key_name=key.name,
+ metadata={'Content-Type': 'text/plain'})
+
+
+if __name__ == "__main__":
+ myScript = ChecksumsGenerator()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
new file mode 100644
index 000000000..78a60b4bc
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_bouncer_aliases.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_bouncer_aliases.py
+
+A script to replace the old-fashioned way of updating the bouncer aliases
+through the tools script.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+# PostReleaseBouncerAliases {{{1
+class PostReleaseBouncerAliases(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseBouncerAliases, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+ "virtualenv_path": "venv",
+ 'credentials_file': 'oauth.txt',
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "update-bouncer-aliases",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseBouncerAliases, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+
+ def _update_bouncer_alias(self, tuxedo_server_url, auth,
+ related_product, alias):
+ from redo import retry
+ import requests
+
+ url = "%s/create_update_alias" % tuxedo_server_url
+ data = {"alias": alias, "related_product": related_product}
+ self.log("Updating {} to point to {} using {}".format(alias,
+ related_product,
+ url))
+
+ # Wrap the real call to hide credentials from retry's logging
+ def do_update_bouncer_alias():
+ r = requests.post(url, data=data, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+
+ retry(do_update_bouncer_alias)
+
+ def update_bouncer_aliases(self):
+ tuxedo_server_url = self.config['tuxedo_server_url']
+ credentials_file = os.path.join(os.getcwd(),
+ self.config['credentials_file'])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ version = self.config['version']
+ for product, info in self.config["products"].iteritems():
+ if "alias" in info:
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+ self._update_bouncer_alias(tuxedo_server_url, auth,
+ related_product, info["alias"])
+
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseBouncerAliases().run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
new file mode 100644
index 000000000..f84b5771c
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_mark_as_shipped.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_mark_as_shipped.py
+
+A script to automate the manual process of marking a release as shipped in Ship-it
+following its successful ship-to-the-door operation.
+"""
+import os
+import sys
+from datetime import datetime
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def build_release_name(product, version, buildno):
+ """Function to reconstruct the name of the release based on product,
+ version and buildnumber
+ """
+ return "{}-{}-build{}".format(product.capitalize(),
+ str(version), str(buildno))
+
+
+class MarkReleaseAsShipped(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(MarkReleaseAsShipped, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "shipitapi",
+ ],
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "timeout": 60,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "mark-as-shipped",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(MarkReleaseAsShipped, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config['properties']
+ mandatory_props = ['product', 'version', 'build_number']
+ missing_props = []
+ for prop in mandatory_props:
+ if prop in props:
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ missing_props.append(prop)
+
+ if missing_props:
+ raise Exception("%s not found in configs" % missing_props)
+
+ self.config['name'] = build_release_name(self.config['product'],
+ self.config['version'],
+ self.config['build_number'])
+
+ def mark_as_shipped(self):
+ """Method to make a simple call to Ship-it API to change a release
+ status to 'shipped'
+ """
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ ship_it_credentials = credentials["ship_it_credentials"]
+ auth = (self.config["ship_it_username"],
+ ship_it_credentials.get(self.config["ship_it_username"]))
+ api_root = self.config['ship_it_root']
+
+ from shipitapi import Release
+ release_api = Release(auth, api_root=api_root,
+ timeout=self.config['timeout'])
+ shipped_at = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
+
+ self.info("Mark the release as shipped with %s timestamp" % shipped_at)
+ release_api.update(self.config['name'],
+ status='shipped', shippedAt=shipped_at)
+
+
+if __name__ == '__main__':
+ MarkReleaseAsShipped().run_and_exit()
diff --git a/testing/mozharness/scripts/release/postrelease_version_bump.py b/testing/mozharness/scripts/release/postrelease_version_bump.py
new file mode 100644
index 000000000..dfffa699a
--- /dev/null
+++ b/testing/mozharness/scripts/release/postrelease_version_bump.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" postrelease_version_bump.py
+
+A script to increase the in-tree version number after shipping a release.
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+
+
+# PostReleaseVersionBump {{{1
+class PostReleaseVersionBump(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--next-version', ], {
+ "action": "store",
+ "dest": "next_version",
+ "type": "string",
+ "help": "Next version used in version bump",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ [['--product', ], {
+ "action": "store",
+ "dest": "product",
+ "type": "string",
+ "help": "Product name",
+ }],
+ [['--version', ], {
+ "action": "store",
+ "dest": "version",
+ "type": "string",
+ "help": "Version",
+ }],
+ [['--build-number', ], {
+ "action": "store",
+ "dest": "build_number",
+ "type": "string",
+ "help": "Build number",
+ }],
+ [['--revision', ], {
+ "action": "store",
+ "dest": "revision",
+ "type": "string",
+ "help": "HG revision to tag",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(PostReleaseVersionBump, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ default_actions=[
+ 'clean-repos',
+ 'pull',
+ 'bump_postrelease',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PostReleaseVersionBump, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ else:
+ props = self.buildbot_config["properties"]
+ for prop in ['next_version', 'product', 'version', 'build_number',
+ 'revision']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ if not self.config.get("next_version"):
+ self.fatal("Next version has to be set. Use --next-version or "
+ "pass `next_version' via buildbot properties.")
+
+ def query_abs_dirs(self):
+ """ Allow for abs_from_dir and abs_to_dir
+ """
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PostReleaseVersionBump, self).query_abs_dirs()
+ self.abs_dirs["abs_gecko_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_gecko_dir"]]
+
+ def query_commit_message(self):
+ return "Automatic version bump. CLOSED TREE NO BUG a=release"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
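+ # resulting push looks like: hg push -e "ssh -l <ssh_user> -i <ssh_key>" -r .
+ # (placeholders illustrative)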
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts, "-r", "."]
+
+ def pull(self):
+ super(PostReleaseVersionBump, self).pull(
+ repos=self.query_repos())
+
+ def bump_postrelease(self, *args, **kwargs):
+ """Bump version"""
+ dirs = self.query_abs_dirs()
+ for f in self.config["version_files"]:
+ curr_version = ".".join(
+ self.get_version(dirs['abs_gecko_dir'], f["file"]))
+ self.replace(os.path.join(dirs['abs_gecko_dir'], f["file"]),
+ curr_version, self.config["next_version"])
+
+ def tag(self):
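+ # produces tags like FIREFOX_39_0b5_BUILD2 and FIREFOX_39_0b5_RELEASE
+ # (product upper-cased, dots in the version replaced by underscores; values illustrative)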
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}",
+ "{product}_{version}_RELEASE"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
+ message = "No bug - Tagging {revision} with {tags} a=release CLOSED TREE"
+ message = message.format(
+ revision=self.config["revision"],
+ tags=', '.join(tags))
+ self.hg_tag(cwd=dirs["abs_gecko_dir"], tags=tags,
+ revision=self.config["revision"], message=message,
+ user=self.config["hg_user"], force=True)
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PostReleaseVersionBump().run_and_exit()
diff --git a/testing/mozharness/scripts/release/publish_balrog.py b/testing/mozharness/scripts/release/publish_balrog.py
new file mode 100644
index 000000000..edb381311
--- /dev/null
+++ b/testing/mozharness/scripts/release/publish_balrog.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script publish a release to Balrog.
+
+"""
+
+import os
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+# PublishBalrog {{{1
+
+
+class PublishBalrog(MercurialScript, BuildbotMixin):
+
+ def __init__(self, require_config_file=True):
+ super(PublishBalrog, self).__init__(
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(PublishBalrog, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'channels',
+ 'balrog_api_root', 'schedule_at', 'background_rate']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(PublishBalrog, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def pull(self):
+ super(PublishBalrog, self).pull(
+ repos=self.query_repos())
+
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-shipper.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--credentials-file", auth,
+ "--username", self.config["balrog_username"],
+ "--version", self.config["version"],
+ "--product", self.config["product"],
+ "--build-number", str(self.config["build_number"]),
+ "--verbose",
+ ])
+ for r in channel_config["publish_rules"]:
+ cmd.extend(["--rules", r])
+ if self.config.get("schedule_at"):
+ cmd.extend(["--schedule-at", self.config["schedule_at"]])
+ if self.config.get("background_rate"):
+ cmd.extend(["--background-rate", str(self.config["background_rate"])])
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ PublishBalrog().run_and_exit()
diff --git a/testing/mozharness/scripts/release/push-candidate-to-releases.py b/testing/mozharness/scripts/release/push-candidate-to-releases.py
new file mode 100644
index 000000000..5339fa38a
--- /dev/null
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -0,0 +1,200 @@
+from multiprocessing.pool import ThreadPool
+import os
+import re
+import sys
+
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.aws import pop_aws_auth_from_env
+
+
+class ReleasePusher(BaseScript, VirtualenvMixin):
+ config_options = [
+ [["--product"], {
+ "dest": "product",
+ "help": "Product being released, eg: firefox, thunderbird",
+ }],
+ [["--version"], {
+ "dest": "version",
+ "help": "Version of release, eg: 39.0b5",
+ }],
+ [["--build-number"], {
+ "dest": "build_number",
+ "help": "Build number of release, eg: 2",
+ }],
+ [["--bucket-name"], {
+ "dest": "bucket_name",
+ "help": "Bucket to copy files from candidates/ to releases/",
+ }],
+ [["--credentials"], {
+ "dest": "credentials",
+ "help": "File containing access key and secret access key",
+ }],
+ [["--exclude"], {
+ "dest": "excludes",
+ "default": [
+ r"^.*tests.*$",
+ r"^.*crashreporter.*$",
+ r"^.*[^k]\.zip(\.asc)?$",
+ r"^.*\.log$",
+ r"^.*\.txt$",
+ r"^.*/partner-repacks.*$",
+ r"^.*.checksums(\.asc)?$",
+ r"^.*/logs/.*$",
+ r"^.*/jsshell.*$",
+ r"^.*json$",
+ r"^.*/host.*$",
+ r"^.*/mar-tools/.*$",
+ r"^.*robocop.apk$",
+ r"^.*bouncer.apk$",
+ r"^.*contrib.*",
+ r"^.*/beetmover-checksums/.*$",
+ ],
+ "action": "append",
+ "help": "List of patterns to exclude from copy. The list can be "
+ "extended by passing multiple --exclude arguments.",
+ }],
+ [["-j", "--parallelization"], {
+ "dest": "parallelization",
+ "default": 20,
+ "type": "int",
+ "help": "Number of copy requests to run concurrently",
+ }],
+ ] + virtualenv_config_options
+
+ def __init__(self, aws_creds):
+ BaseScript.__init__(self,
+ config_options=self.config_options,
+ require_config_file=False,
+ config={
+ "virtualenv_modules": [
+ "boto",
+ "redo",
+ ],
+ "virtualenv_path": "venv",
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "push-to-releases",
+ ],
+ )
+
+ # validate aws credentials
+ if not (all(aws_creds) or self.config.get('credentials')):
+ self.fatal("aws creds not defined. please add them to your config or env.")
+ if any(aws_creds) and self.config.get('credentials'):
+ self.fatal("aws creds found in env and self.config. please declare in one place only.")
+
+ # set aws credentials
+ if all(aws_creds):
+ self.aws_key_id, self.aws_secret_key = aws_creds
+ else: # use the credentials file via BOTO_CONFIG below
+ self.aws_key_id, self.aws_secret_key = None, None
+ # set the env var for boto to read our special config file
+ # rather than anything else we have at ~/.boto
+ os.environ["BOTO_CONFIG"] = os.path.abspath(self.config["credentials"])
+
+ def _get_candidates_prefix(self):
+ return "pub/{}/candidates/{}-candidates/build{}/".format(
+ self.config['product'],
+ self.config["version"],
+ self.config["build_number"]
+ )
+
+ def _get_releases_prefix(self):
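+ # e.g. "pub/firefox/releases/39.0b5/" (illustrative product/version values)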
+ return "pub/{}/releases/{}/".format(
+ self.config["product"],
+ self.config["version"]
+ )
+
+ def _matches_exclude(self, keyname):
+ for exclude in self.config["excludes"]:
+ if re.search(exclude, keyname):
+ return True
+ return False
+
+ def push_to_releases(self):
+ """This step grabs the list of files in the candidates dir,
+ filters out any unwanted files from within them, and copies
+ the remainder."""
+ from boto.s3.connection import S3Connection
+ from boto.exception import S3CopyError, S3ResponseError
+ from redo import retry
+
+ # suppress boto debug logging, it's too verbose with --loglevel=debug
+ import logging
+ logging.getLogger('boto').setLevel(logging.INFO)
+
+ self.info("Connecting to S3")
+ conn = S3Connection(aws_access_key_id=self.aws_key_id,
+ aws_secret_access_key=self.aws_secret_key)
+ self.info("Getting bucket {}".format(self.config["bucket_name"]))
+ bucket = conn.get_bucket(self.config["bucket_name"])
+
+ # warn if the destination already has keys in it
+ self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
+ keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
+ if keys:
+ self.warning("Destination already exists with %s keys" % len(keys))
+
+ def worker(item):
+ source, destination = item
+
+ def copy_key():
+ source_key = bucket.get_key(source)
+ dest_key = bucket.get_key(destination)
+ # According to http://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html
+ # S3 key MD5 is represented as ETag, except when objects are
+ # uploaded using multipart method. In this case objects's ETag
+ # is constructed using its MD5, minus symbol, and number of
+ # part. See http://stackoverflow.com/questions/12186993/what-is-the-algorithm-to-compute-the-amazon-s3-etag-for-a-file-larger-than-5gb#answer-19896823
+ source_md5 = source_key.etag.split("-")[0]
+ if dest_key:
+ dest_md5 = dest_key.etag.split("-")[0]
+ else:
+ dest_md5 = None
+
+ if not dest_key:
+ self.info("Copying {} to {}".format(source, destination))
+ bucket.copy_key(destination, self.config["bucket_name"],
+ source)
+ elif source_md5 == dest_md5:
+ self.warning(
+ "{} already exists with the same content ({}), skipping copy".format(
+ destination, dest_md5))
+ else:
+ self.fatal(
+ "{} already exists with the different content (src ETag: {}, dest ETag: {}), aborting".format(
+ destination, source_key.etag, dest_key.etag))
+
+ return retry(copy_key, sleeptime=5, max_sleeptime=60,
+ retry_exceptions=(S3CopyError, S3ResponseError))
+
+ def find_release_files():
+ candidates_prefix = self._get_candidates_prefix()
+ release_prefix = self._get_releases_prefix()
+ self.info("Getting key names from candidates")
+ for key in bucket.list(prefix=candidates_prefix):
+ keyname = key.name
+ if self._matches_exclude(keyname):
+ self.debug("Excluding {}".format(keyname))
+ else:
+ destination = keyname.replace(candidates_prefix,
+ release_prefix)
+ yield (keyname, destination)
+
+ pool = ThreadPool(self.config["parallelization"])
+ pool.map(worker, find_release_files())
+
+if __name__ == "__main__":
+ myScript = ReleasePusher(pop_aws_auth_from_env())
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/release/updates.py b/testing/mozharness/scripts/release/updates.py
new file mode 100644
index 000000000..4b660a67b
--- /dev/null
+++ b/testing/mozharness/scripts/release/updates.py
@@ -0,0 +1,299 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" updates.py
+
+A script to bump patcher configs, generate update verification configs, and
+publish top-level release blob information to Balrog.
+
+It clones the tools repo, modifies the existing patcher config to include
+current release build information, generates update verification configs,
+commits the changes, and tags the repo following Releng tagging conventions.
+After the changes are pushed to the repo, the script submits top-level release
+information to Balrog.
+"""
+
+import os
+import re
+import sys
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.repo_manupulation import MercurialRepoManipulationMixin
+from mozharness.mozilla.release import get_previous_version
+
+
+# UpdatesBumper {{{1
+class UpdatesBumper(MercurialScript, BuildbotMixin,
+ MercurialRepoManipulationMixin):
+ config_options = [
+ [['--hg-user', ], {
+ "action": "store",
+ "dest": "hg_user",
+ "type": "string",
+ "default": "ffxbld <release@mozilla.com>",
+ "help": "Specify what user to use to commit to hg.",
+ }],
+ [['--ssh-user', ], {
+ "action": "store",
+ "dest": "ssh_user",
+ "type": "string",
+ "help": "SSH username with hg.mozilla.org permissions",
+ }],
+ [['--ssh-key', ], {
+ "action": "store",
+ "dest": "ssh_key",
+ "type": "string",
+ "help": "Path to SSH key.",
+ }],
+ ]
+
+ def __init__(self, require_config_file=True):
+ super(UpdatesBumper, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ default_actions=[
+ 'clobber',
+ 'pull',
+ 'download-shipped-locales',
+ 'bump-configs',
+ 'commit-changes',
+ 'tag',
+ 'push',
+ 'submit-to-balrog',
+ ],
+ config={
+ 'buildbot_json_path': 'buildprops.json',
+ 'credentials_file': 'oauth.txt',
+ },
+ require_config_file=require_config_file
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UpdatesBumper, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ # TODO: version and appVersion should come from repo
+ props = self.buildbot_config["properties"]
+ for prop in ['product', 'version', 'build_number', 'revision',
+ 'appVersion', 'balrog_api_root', "channels"]:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build") for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+ self.config["channels"] = [c.strip() for c in
+ props["channels"].split(",")]
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ self.abs_dirs = super(UpdatesBumper, self).query_abs_dirs()
+ self.abs_dirs["abs_tools_dir"] = os.path.join(
+ self.abs_dirs['abs_work_dir'], self.config["repo"]["dest"])
+ return self.abs_dirs
+
+ def query_repos(self):
+ """Build a list of repos to clone."""
+ return [self.config["repo"]]
+
+ def query_commit_dirs(self):
+ return [self.query_abs_dirs()["abs_tools_dir"]]
+
+ def query_commit_message(self):
+ return "Automated configuration bump"
+
+ def query_push_dirs(self):
+ return self.query_commit_dirs()
+
+ def query_push_args(self, cwd):
+ # cwd is not used here
+ hg_ssh_opts = "ssh -l {user} -i {key}".format(
+ user=self.config["ssh_user"],
+ key=os.path.expanduser(self.config["ssh_key"])
+ )
+ return ["-e", hg_ssh_opts]
+
+ def query_shipped_locales_path(self):
+ dirs = self.query_abs_dirs()
+ return os.path.join(dirs["abs_work_dir"], "shipped-locales")
+
+ def query_channel_configs(self):
+ """Return a list of channel configs.
+ For RC builds it returns "release" and "beta" using
+ "enabled_if_version_matches" to match RC.
+
+ :return: list
+ """
+ return [(n, c) for n, c in self.config["update_channels"].items() if
+ n in self.config["channels"]]
+
+ def pull(self):
+ super(UpdatesBumper, self).pull(
+ repos=self.query_repos())
+
+ def download_shipped_locales(self):
+ dirs = self.query_abs_dirs()
+ self.mkdir_p(dirs["abs_work_dir"])
+ url = self.config["shipped-locales-url"].format(
+ revision=self.config["revision"])
+ if not self.download_file(url=url,
+ file_name=self.query_shipped_locales_path()):
+ self.fatal("Unable to fetch shipped-locales from %s" % url)
+
+ def bump_configs(self):
+ for channel, channel_config in self.query_channel_configs():
+ self.bump_patcher_config(channel_config)
+ self.bump_update_verify_configs(channel, channel_config)
+
+ def query_matching_partials(self, channel_config):
+ return [(v, b) for v, b in self.config["partial_versions"] if
+ re.match(channel_config["version_regex"], v)]
+
+ def query_patcher_config(self, channel_config):
+ dirs = self.query_abs_dirs()
+ patcher_config = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-configs",
+ channel_config["patcher_config"])
+ return patcher_config
+
+ def query_update_verify_config(self, channel, platform):
+ dirs = self.query_abs_dirs()
+ uvc = os.path.join(
+ dirs["abs_tools_dir"], "release/updates",
+ "{}-{}-{}.cfg".format(channel, self.config["product"], platform))
+ return uvc
+
+ def bump_patcher_config(self, channel_config):
+ # TODO: to make it possible to run this before we have files copied to
+ # the candidates directory, we need to add support to fetch build IDs
+ # from tasks.
+ dirs = self.query_abs_dirs()
+ env = {"PERL5LIB": os.path.join(dirs["abs_tools_dir"], "lib/perl")}
+ partial_versions = [v[0] for v in
+ self.query_matching_partials(channel_config)]
+ script = os.path.join(
+ dirs["abs_tools_dir"], "release/patcher-config-bump.pl")
+ patcher_config = self.query_patcher_config(channel_config)
+ cmd = [self.query_exe("perl"), script]
+ cmd.extend([
+ "-p", self.config["product"],
+ "-r", self.config["product"].capitalize(),
+ "-v", self.config["version"],
+ "-a", self.config["appVersion"],
+ "-o", get_previous_version(
+ self.config["version"], partial_versions),
+ "-b", str(self.config["build_number"]),
+ "-c", patcher_config,
+ "-f", self.config["archive_domain"],
+ "-d", self.config["download_domain"],
+ "-l", self.query_shipped_locales_path(),
+ ])
+ for v in partial_versions:
+ cmd.extend(["--partial-version", v])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
+ for mar_channel_id in channel_config["mar_channel_ids"]:
+ cmd.extend(["--mar-channel-id", mar_channel_id])
+ self.run_command(cmd, halt_on_failure=True, env=env)
+
+ def bump_update_verify_configs(self, channel, channel_config):
+ dirs = self.query_abs_dirs()
+ script = os.path.join(
+ dirs["abs_tools_dir"],
+ "scripts/build-promotion/create-update-verify-config.py")
+ patcher_config = self.query_patcher_config(channel_config)
+ for platform in self.config["platforms"]:
+ cmd = [self.query_exe("python"), script]
+ output = self.query_update_verify_config(channel, platform)
+ cmd.extend([
+ "--config", patcher_config,
+ "--platform", platform,
+ "--update-verify-channel",
+ channel_config["update_verify_channel"],
+ "--output", output,
+ "--archive-prefix", self.config["archive_prefix"],
+ "--previous-archive-prefix",
+ self.config["previous_archive_prefix"],
+ "--product", self.config["product"],
+ "--balrog-url", self.config["balrog_url"],
+ "--build-number", str(self.config["build_number"]),
+ ])
+
+ self.run_command(cmd, halt_on_failure=True)
+
+ def tag(self):
+ dirs = self.query_abs_dirs()
+ tags = ["{product}_{version}_BUILD{build_number}_RUNTIME",
+ "{product}_{version}_RELEASE_RUNTIME"]
+ tags = [t.format(product=self.config["product"].upper(),
+ version=self.config["version"].replace(".", "_"),
+ build_number=self.config["build_number"])
+ for t in tags]
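+ # e.g. (illustrative) product "firefox", version "50.0", build_number 1
+ # yields "FIREFOX_50_0_BUILD1_RUNTIME" and "FIREFOX_50_0_RELEASE_RUNTIME".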
+ self.hg_tag(cwd=dirs["abs_tools_dir"], tags=tags,
+ user=self.config["hg_user"], force=True)
+
+ def submit_to_balrog(self):
+ for _, channel_config in self.query_channel_configs():
+ self._submit_to_balrog(channel_config)
+
+ def _submit_to_balrog(self, channel_config):
+ dirs = self.query_abs_dirs()
+ auth = os.path.join(os.getcwd(), self.config['credentials_file'])
+ cmd = [
+ self.query_exe("python"),
+ os.path.join(dirs["abs_tools_dir"],
+ "scripts/build-promotion/balrog-release-pusher.py")]
+ cmd.extend([
+ "--api-root", self.config["balrog_api_root"],
+ "--download-domain", self.config["download_domain"],
+ "--archive-domain", self.config["archive_domain"],
+ "--credentials-file", auth,
+ "--product", self.config["product"],
+ "--version", self.config["version"],
+ "--build-number", str(self.config["build_number"]),
+ "--app-version", self.config["appVersion"],
+ "--username", self.config["balrog_username"],
+ "--verbose",
+ ])
+ for c in channel_config["channel_names"]:
+ cmd.extend(["--channel", c])
+ for r in channel_config["rules_to_update"]:
+ cmd.extend(["--rule-to-update", r])
+ for p in self.config["platforms"]:
+ cmd.extend(["--platform", p])
+ for v, build_number in self.query_matching_partials(channel_config):
+ partial = "{version}build{build_number}".format(
+ version=v, build_number=build_number)
+ cmd.extend(["--partial-update", partial])
+ if channel_config["requires_mirrors"]:
+ cmd.append("--requires-mirrors")
+ if self.config["balrog_use_dummy_suffix"]:
+ cmd.append("--dummy")
+
+ self.retry(lambda: self.run_command(cmd, halt_on_failure=True))
+
+# __main__ {{{1
+if __name__ == '__main__':
+ UpdatesBumper().run_and_exit()
diff --git a/testing/mozharness/scripts/release/uptake_monitoring.py b/testing/mozharness/scripts/release/uptake_monitoring.py
new file mode 100644
index 000000000..9ec24621f
--- /dev/null
+++ b/testing/mozharness/scripts/release/uptake_monitoring.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# lint_ignore=E501
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+""" uptake_monitoring.py
+
+A script to replace the old-fashioned way of computing uptake monitoring
+from the scheduler within the slaves.
+"""
+
+import os
+import sys
+import datetime
+import time
+import xml.dom.minidom
+
+sys.path.insert(1, os.path.dirname(os.path.dirname(sys.path[0])))
+
+from mozharness.base.python import VirtualenvMixin, virtualenv_config_options
+from mozharness.base.script import BaseScript
+from mozharness.mozilla.buildbot import BuildbotMixin
+
+
+def get_tuxedo_uptake_url(tuxedo_server_url, related_product, os):
+ return '%s/uptake/?product=%s&os=%s' % (tuxedo_server_url,
+ related_product, os)
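+# For example (illustrative values only):
+#   get_tuxedo_uptake_url("https://bouncer.example.com/api", "Firefox-50.0", "win64")
+# returns:
+#   https://bouncer.example.com/api/uptake/?product=Firefox-50.0&os=win64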
+
+
+class UptakeMonitoring(BaseScript, VirtualenvMixin, BuildbotMixin):
+ config_options = virtualenv_config_options
+
+ def __init__(self, require_config_file=True):
+ super(UptakeMonitoring, self).__init__(
+ config_options=self.config_options,
+ require_config_file=require_config_file,
+ config={
+ "virtualenv_modules": [
+ "redo",
+ "requests",
+ ],
+
+ "virtualenv_path": "venv",
+ "credentials_file": "oauth.txt",
+ "buildbot_json_path": "buildprops.json",
+ "poll_interval": 60,
+ "poll_timeout": 20*60,
+ "min_uptake": 10000,
+ },
+ all_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ default_actions=[
+ "create-virtualenv",
+ "activate-virtualenv",
+ "monitor-uptake",
+ ],
+ )
+
+ def _pre_config_lock(self, rw_config):
+ super(UptakeMonitoring, self)._pre_config_lock(rw_config)
+ # override properties from buildbot properties here as defined by
+ # taskcluster properties
+ self.read_buildbot_config()
+ if not self.buildbot_config:
+ self.warning("Skipping buildbot properties overrides")
+ return
+ props = self.buildbot_config["properties"]
+ for prop in ['tuxedo_server_url', 'version']:
+ if props.get(prop):
+ self.info("Overriding %s with %s" % (prop, props[prop]))
+ self.config[prop] = props.get(prop)
+ else:
+ self.warning("%s could not be found within buildprops" % prop)
+ return
+ partials = [v.strip() for v in props["partial_versions"].split(",")]
+ self.config["partial_versions"] = [v.split("build")[0] for v in partials]
+ self.config["platforms"] = [p.strip() for p in
+ props["platforms"].split(",")]
+
+ def _get_product_uptake(self, tuxedo_server_url, auth,
+ related_product, os):
+ from redo import retry
+ import requests
+
+ url = get_tuxedo_uptake_url(tuxedo_server_url, related_product, os)
+ self.info("Requesting {} from tuxedo".format(url))
+
+ def get_tuxedo_page():
+ r = requests.get(url, auth=auth,
+ verify=False, timeout=60)
+ r.raise_for_status()
+ return r.content
+
+ def calculateUptake(page):
+ doc = xml.dom.minidom.parseString(page)
+ uptake_values = []
+
+ for element in doc.getElementsByTagName('available'):
+ for node in element.childNodes:
+ if node.nodeType == xml.dom.minidom.Node.TEXT_NODE and \
+ node.data.isdigit():
+ uptake_values.append(int(node.data))
+ if not uptake_values:
+ uptake_values = [0]
+ return min(uptake_values)
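+ # The parser above only looks at <available> elements; an illustrative
+ # response fragment (any surrounding markup is assumed, not shown) is:
+ #
+ #   <available>12345</available>
+ #   <available>9876</available>
+ #
+ # and the reported uptake is the minimum of those values (9876 here).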
+
+ page = retry(get_tuxedo_page)
+ uptake = calculateUptake(page)
+ self.info("Current uptake for {} is {}".format(related_product, uptake))
+ return uptake
+
+ def _get_release_uptake(self, auth):
+ assert isinstance(self.config["platforms"], (list, tuple))
+
+ # handle the products first
+ tuxedo_server_url = self.config["tuxedo_server_url"]
+ version = self.config["version"]
+ dl = []
+
+ for product, info in self.config["products"].iteritems():
+ if info.get("check_uptake"):
+ product_template = info["product-name"]
+ related_product = product_template % {"version": version}
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ # handle the partials as well
+ prev_versions = self.config["partial_versions"]
+ for product, info in self.config["partials"].iteritems():
+ if info.get("check_uptake"):
+ product_template = info["product-name"]
+ for prev_version in prev_versions:
+ subs = {
+ "version": version,
+ "prev_version": prev_version
+ }
+ related_product = product_template % subs
+
+ enUS_platforms = set(self.config["platforms"])
+ paths_platforms = set(info["paths"].keys())
+ platforms = enUS_platforms.intersection(paths_platforms)
+
+ for platform in platforms:
+ bouncer_platform = info["paths"].get(platform).get('bouncer-platform')
+ dl.append(self._get_product_uptake(tuxedo_server_url, auth,
+ related_product, bouncer_platform))
+ return min(dl)
+
+ def monitor_uptake(self):
+ credentials_file = os.path.join(os.getcwd(),
+ self.config["credentials_file"])
+ credentials = {}
+ execfile(credentials_file, credentials)
+ auth = (credentials['tuxedoUsername'], credentials['tuxedoPassword'])
+ self.info("Starting the loop to determine the uptake monitoring ...")
+
+ start_time = datetime.datetime.now()
+ while True:
+ delta = (datetime.datetime.now() - start_time).seconds
+ if delta > self.config["poll_timeout"]:
+ self.error("Uptake monitoring sadly timed-out")
+ raise Exception("Time-out during uptake monitoring")
+
+ uptake = self._get_release_uptake(auth)
+ self.info("Current uptake value to check is {}".format(uptake))
+
+ if uptake >= self.config["min_uptake"]:
+ self.info("Uptake monitoring is complete!")
+ break
+ else:
+ self.info("Mirrors not yet updated, sleeping for a bit ...")
+ time.sleep(self.config["poll_interval"])
+
+
+if __name__ == '__main__':
+ myScript = UptakeMonitoring()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/spidermonkey/build.b2g b/testing/mozharness/scripts/spidermonkey/build.b2g
new file mode 100755
index 000000000..958946230
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.b2g
@@ -0,0 +1,8 @@
+#!/bin/bash -e
+
+cd $SOURCE
+TOP=$(cd .. && echo $PWD)
+export MOZBUILD_STATE_PATH=$TOP/mozbuild-state
+[ -d $MOZBUILD_STATE_PATH ] || mkdir $MOZBUILD_STATE_PATH
+
+exec ./mach build -v -j8
diff --git a/testing/mozharness/scripts/spidermonkey/build.browser b/testing/mozharness/scripts/spidermonkey/build.browser
new file mode 100755
index 000000000..645d2ae86
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.browser
@@ -0,0 +1,10 @@
+#!/bin/bash -e
+
+cd $SOURCE
+TOP=$(cd ..; pwd)
+export MOZBUILD_STATE_PATH=$TOP/mozbuild-state
+[ -d $MOZBUILD_STATE_PATH ] || mkdir $MOZBUILD_STATE_PATH
+
+export MOZCONFIG=$SOURCE/browser/config/mozconfigs/linux64/hazards
+
+exec ./mach build -v -j8
diff --git a/testing/mozharness/scripts/spidermonkey/build.shell b/testing/mozharness/scripts/spidermonkey/build.shell
new file mode 100755
index 000000000..7aad477ea
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey/build.shell
@@ -0,0 +1,6 @@
+#!/bin/bash -ex
+
+mkdir -p "$ANALYZED_OBJDIR"
+cd "$ANALYZED_OBJDIR"
+$SOURCE/js/src/configure --enable-debug --enable-optimize --enable-stdcxx-compat --enable-ctypes --enable-nspr-build
+make -j12 -s
diff --git a/testing/mozharness/scripts/spidermonkey_build.py b/testing/mozharness/scripts/spidermonkey_build.py
new file mode 100755
index 000000000..5522545da
--- /dev/null
+++ b/testing/mozharness/scripts/spidermonkey_build.py
@@ -0,0 +1,482 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+import sys
+import copy
+from datetime import datetime
+from functools import wraps
+
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.errors import MakefileErrorList
+from mozharness.base.script import BaseScript
+from mozharness.base.transfer import TransferMixin
+from mozharness.base.vcs.vcsbase import VCSMixin
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.buildbot import BuildbotMixin
+from mozharness.mozilla.building.hazards import HazardError, HazardAnalysis
+from mozharness.mozilla.purge import PurgeMixin
+from mozharness.mozilla.mock import MockMixin
+from mozharness.mozilla.tooltool import TooltoolMixin
+
+SUCCESS, WARNINGS, FAILURE, EXCEPTION, RETRY = xrange(5)
+
+
+def requires(*queries):
+ """Wrapper for detecting problems where some bit of information
+ required by the wrapped step is unavailable. Use it by prepending
+ @requires(query_foo), which will check whether self.query_foo() returns
+ something useful."""
+ def make_wrapper(f):
+ @wraps(f)
+ def wrapper(self, *args, **kwargs):
+ for query in queries:
+ val = query(self)
+ goodval = not (val is None or "None" in str(val))
+ assert goodval, f.__name__ + " requires " + query.__name__ + " to return a value"
+ return f(self, *args, **kwargs)
+ return wrapper
+ return make_wrapper
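+# Illustrative usage (class and method names below are hypothetical): guard an
+# action so it fails fast when a required query returns nothing useful.
+#
+#   class SomeBuild(BaseScript):
+#       def query_repo(self):
+#           ...
+#
+#       @requires(query_repo)
+#       def checkout_source(self):
+#           ...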
+
+
+nuisance_env_vars = ['TERMCAP', 'LS_COLORS', 'PWD', '_']
+
+
+class SpidermonkeyBuild(MockMixin,
+ PurgeMixin, BaseScript,
+ VCSMixin, BuildbotMixin, TooltoolMixin, TransferMixin, BlobUploadMixin):
+ config_options = [
+ [["--repo"], {
+ "dest": "repo",
+ "help": "which gecko repo to get spidermonkey from",
+ }],
+ [["--source"], {
+ "dest": "source",
+ "help": "directory containing gecko source tree (instead of --repo)",
+ }],
+ [["--revision"], {
+ "dest": "revision",
+ }],
+ [["--branch"], {
+ "dest": "branch",
+ }],
+ [["--vcs-share-base"], {
+ "dest": "vcs_share_base",
+ "help": "base directory for shared repositories",
+ }],
+ [["-j"], {
+ "dest": "concurrency",
+ "type": int,
+ "default": 4,
+ "help": "number of simultaneous jobs used while building the shell " +
+ "(currently ignored for the analyzed build",
+ }] + copy.deepcopy(blobupload_config_options)
+ ]
+
+ def __init__(self):
+ super(SpidermonkeyBuild, self).__init__(
+ config_options=self.config_options,
+ # other stuff
+ all_actions=[
+ 'purge',
+ 'checkout-tools',
+
+ # First, build an optimized JS shell for running the analysis
+ 'checkout-source',
+ 'get-blobs',
+ 'clobber-shell',
+ 'configure-shell',
+ 'build-shell',
+
+ # Next, build a tree with the analysis plugin active. Note that
+ # we are using the same checkout for the JS shell build and the
+ # build of the source to be analyzed, which is a little
+ # unnecessary (no need to rebuild the JS shell all the time).
+ # (Different objdir, though.)
+
+ 'clobber-analysis',
+ 'setup-analysis',
+ 'run-analysis',
+ 'collect-analysis-output',
+ 'upload-analysis',
+ 'check-expectations',
+ ],
+ default_actions=[
+ 'purge',
+ 'checkout-tools',
+ 'checkout-source',
+ 'get-blobs',
+ 'clobber-shell',
+ 'configure-shell',
+ 'build-shell',
+ 'clobber-analysis',
+ 'setup-analysis',
+ 'run-analysis',
+ 'collect-analysis-output',
+ # Temporarily disabled, see bug 1211402
+ # 'upload-analysis',
+ 'check-expectations',
+ ],
+ config={
+ 'default_vcs': 'hg',
+ 'vcs_share_base': os.environ.get('HG_SHARE_BASE_DIR'),
+ 'ccache': True,
+ 'buildbot_json_path': os.environ.get('PROPERTIES_FILE'),
+ 'tools_repo': 'https://hg.mozilla.org/build/tools',
+
+ 'upload_ssh_server': None,
+ 'upload_remote_basepath': None,
+ 'enable_try_uploads': True,
+ 'source': None,
+ 'stage_product': 'firefox',
+ },
+ )
+
+ self.buildid = None
+ self.create_virtualenv()
+ self.analysis = HazardAnalysis()
+
+ def _pre_config_lock(self, rw_config):
+ if self.config['source']:
+ self.config['srcdir'] = self.config['source']
+ super(SpidermonkeyBuild, self)._pre_config_lock(rw_config)
+
+ if self.buildbot_config is None:
+ self.info("Reading buildbot build properties...")
+ self.read_buildbot_config()
+
+ if self.buildbot_config:
+ bb_props = [('mock_target', 'mock_target', None),
+ ('hgurl', 'hgurl', None),
+ ('clobberer_url', 'clobberer_url', 'https://api.pub.build.mozilla.org/clobberer/lastclobber'),
+ ('force_clobber', 'force_clobber', None),
+ ('branch', 'blob_upload_branch', None),
+ ]
+ buildbot_props = self.buildbot_config.get('properties', {})
+ for bb_prop, cfg_prop, default in bb_props:
+ if not self.config.get(cfg_prop) and buildbot_props.get(bb_prop, default):
+ self.config[cfg_prop] = buildbot_props.get(bb_prop, default)
+ self.config['is_automation'] = True
+ else:
+ self.config['is_automation'] = False
+ self.config.setdefault('blob_upload_branch', 'devel')
+
+ dirs = self.query_abs_dirs()
+ replacements = self.config['env_replacements'].copy()
+ for k, v in replacements.items():
+ replacements[k] = v % dirs
+
+ self.env = self.query_env(replace_dict=replacements,
+ partial_env=self.config['partial_env'],
+ purge_env=nuisance_env_vars)
+ self.env['MOZ_UPLOAD_DIR'] = dirs['abs_blob_upload_dir']
+ self.env['TOOLTOOL_DIR'] = dirs['abs_work_dir']
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = BaseScript.query_abs_dirs(self)
+
+ abs_work_dir = abs_dirs['abs_work_dir']
+ dirs = {
+ 'shell_objdir':
+ os.path.join(abs_work_dir, self.config['shell-objdir']),
+ 'mozharness_scriptdir':
+ os.path.abspath(os.path.dirname(__file__)),
+ 'abs_analysis_dir':
+ os.path.join(abs_work_dir, self.config['analysis-dir']),
+ 'abs_analyzed_objdir':
+ os.path.join(abs_work_dir, self.config['srcdir'], self.config['analysis-objdir']),
+ 'analysis_scriptdir':
+ os.path.join(self.config['srcdir'], self.config['analysis-scriptdir']),
+ 'abs_tools_dir':
+ os.path.join(abs_dirs['base_work_dir'], 'tools'),
+ 'gecko_src':
+ os.path.join(abs_work_dir, self.config['srcdir']),
+ 'abs_blob_upload_dir':
+ os.path.join(abs_work_dir, 'blobber_upload_dir'),
+ }
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ def query_repo(self):
+ if self.config.get('repo'):
+ return self.config['repo']
+ elif self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.config['hgurl'] + self.buildbot_config['properties']['repo_path']
+ else:
+ return None
+
+ def query_revision(self):
+ if 'revision' in self.buildbot_properties:
+ revision = self.buildbot_properties['revision']
+ elif self.buildbot_config and 'sourcestamp' in self.buildbot_config:
+ revision = self.buildbot_config['sourcestamp']['revision']
+ else:
+ # Useful for local testing. In actual use, this would always be
+ # None.
+ revision = self.config.get('revision')
+
+ return revision
+
+ def query_branch(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['branch']
+ elif 'branch' in self.config:
+ # Used for locally testing try vs non-try
+ return self.config['branch']
+ else:
+ return os.path.basename(self.query_repo())
+
+ def query_compiler_manifest(self):
+ dirs = self.query_abs_dirs()
+ manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['compiler_manifest'])
+ if os.path.exists(manifest):
+ return manifest
+ return os.path.join(dirs['abs_work_dir'], self.config['compiler_manifest'])
+
+ def query_sixgill_manifest(self):
+ dirs = self.query_abs_dirs()
+ manifest = os.path.join(dirs['abs_work_dir'], dirs['analysis_scriptdir'], self.config['sixgill_manifest'])
+ if os.path.exists(manifest):
+ return manifest
+ return os.path.join(dirs['abs_work_dir'], self.config['sixgill_manifest'])
+
+ def query_buildid(self):
+ if self.buildid:
+ return self.buildid
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ self.buildid = self.buildbot_config['properties'].get('buildid')
+ if not self.buildid:
+ self.buildid = datetime.now().strftime("%Y%m%d%H%M%S")
+ return self.buildid
+
+ def query_upload_ssh_server(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['upload_ssh_server']
+ else:
+ return self.config['upload_ssh_server']
+
+ def query_upload_ssh_key(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ key = self.buildbot_config['properties']['upload_ssh_key']
+ else:
+ key = self.config['upload_ssh_key']
+ if self.mock_enabled and not key.startswith("/"):
+ key = "/home/mock_mozilla/.ssh/" + key
+ return key
+
+ def query_upload_ssh_user(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['upload_ssh_user']
+ else:
+ return self.config['upload_ssh_user']
+
+ def query_product(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['product']
+ else:
+ return self.config['product']
+
+ def query_upload_remote_basepath(self):
+ if self.config.get('upload_remote_basepath'):
+ return self.config['upload_remote_basepath']
+ else:
+ return "/pub/mozilla.org/{product}".format(
+ product=self.query_product(),
+ )
+
+ def query_upload_remote_baseuri(self):
+ baseuri = self.config.get('upload_remote_baseuri')
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ buildprops = self.buildbot_config['properties']
+ if 'upload_remote_baseuri' in buildprops:
+ baseuri = buildprops['upload_remote_baseuri']
+ return baseuri.strip("/") if baseuri else None
+
+ def query_target(self):
+ if self.buildbot_config and 'properties' in self.buildbot_config:
+ return self.buildbot_config['properties']['platform']
+ else:
+ return self.config.get('target')
+
+ def query_upload_path(self):
+ branch = self.query_branch()
+
+ common = {
+ 'basepath': self.query_upload_remote_basepath(),
+ 'branch': branch,
+ 'target': self.query_target(),
+ }
+
+ if branch == 'try':
+ if not self.config['enable_try_uploads']:
+ return None
+ try:
+ user = self.buildbot_config['sourcestamp']['changes'][0]['who']
+ except (KeyError, TypeError):
+ user = "unknown"
+ return "{basepath}/try-builds/{user}-{rev}/{branch}-{target}".format(
+ user=user,
+ rev=self.query_revision(),
+ **common
+ )
+ else:
+ return "{basepath}/tinderbox-builds/{branch}-{target}/{buildid}".format(
+ buildid=self.query_buildid(),
+ **common
+ )
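+ # For example (all values illustrative), a try push could end up at
+ # "{basepath}/try-builds/user@example.com-0123456789ab/try-linux64", while
+ # a non-try build goes to
+ # "{basepath}/tinderbox-builds/mozilla-central-linux64/20161122030212".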
+
+ def query_do_upload(self):
+ if self.query_branch() == 'try':
+ return self.config.get('enable_try_uploads')
+ return True
+
+ # Actions {{{2
+ def purge(self):
+ dirs = self.query_abs_dirs()
+ self.info("purging, abs_upload_dir=" + dirs['abs_upload_dir'])
+ PurgeMixin.clobber(
+ self,
+ always_clobber_dirs=[
+ dirs['abs_upload_dir'],
+ ],
+ )
+
+ def checkout_tools(self):
+ dirs = self.query_abs_dirs()
+
+ # If running from within a directory also passed as the --source dir,
+ # this has the danger of clobbering <source>/tools/
+ if self.config['source']:
+ srcdir = self.config['source']
+ if os.path.samefile(srcdir, os.path.dirname(dirs['abs_tools_dir'])):
+ raise Exception("Cannot run from source checkout to avoid overwriting subdirs")
+
+ rev = self.vcs_checkout(
+ vcs='hg',
+ branch="default",
+ repo=self.config['tools_repo'],
+ clean=False,
+ dest=dirs['abs_tools_dir'],
+ )
+ self.set_buildbot_property("tools_revision", rev, write_to_file=True)
+
+ def do_checkout_source(self):
+ # --source option means to use an existing source directory instead of checking one out.
+ if self.config['source']:
+ return
+
+ dirs = self.query_abs_dirs()
+ dest = dirs['gecko_src']
+
+ # Pre-create the directory to appease the share extension
+ if not os.path.exists(dest):
+ self.mkdir_p(dest)
+
+ rev = self.vcs_checkout(
+ repo=self.query_repo(),
+ dest=dest,
+ revision=self.query_revision(),
+ branch=self.config.get('branch'),
+ clean=True,
+ )
+ self.set_buildbot_property('source_revision', rev, write_to_file=True)
+
+ def checkout_source(self):
+ try:
+ self.do_checkout_source()
+ except Exception as e:
+ self.fatal("checkout failed: " + str(e), exit_code=RETRY)
+
+ def get_blobs(self):
+ work_dir = self.query_abs_dirs()['abs_work_dir']
+ if not os.path.exists(work_dir):
+ self.mkdir_p(work_dir)
+ self.tooltool_fetch(self.query_compiler_manifest(), output_dir=work_dir)
+ self.tooltool_fetch(self.query_sixgill_manifest(), output_dir=work_dir)
+
+ def clobber_shell(self):
+ self.analysis.clobber_shell(self)
+
+ def configure_shell(self):
+ self.enable_mock()
+
+ try:
+ self.analysis.configure_shell(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def build_shell(self):
+ self.enable_mock()
+
+ try:
+ self.analysis.build_shell(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def clobber_analysis(self):
+ self.analysis.clobber(self)
+
+ def setup_analysis(self):
+ self.analysis.setup(self)
+
+ def run_analysis(self):
+ self.enable_mock()
+
+ upload_dir = self.query_abs_dirs()['abs_blob_upload_dir']
+ if not os.path.exists(upload_dir):
+ self.mkdir_p(upload_dir)
+
+ env = self.env.copy()
+ env['MOZ_UPLOAD_DIR'] = upload_dir
+
+ try:
+ self.analysis.run(self, env=env, error_list=MakefileErrorList)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+ self.disable_mock()
+
+ def collect_analysis_output(self):
+ self.analysis.collect_output(self)
+
+ def upload_analysis(self):
+ if not self.config['is_automation']:
+ return
+
+ if not self.query_do_upload():
+ self.info("Uploads disabled for this build. Skipping...")
+ return
+
+ self.enable_mock()
+
+ try:
+ self.analysis.upload_results(self)
+ except HazardError as e:
+ self.error(e)
+ self.return_code = WARNINGS
+
+ self.disable_mock()
+
+ def check_expectations(self):
+ try:
+ self.analysis.check_expectations(self)
+ except HazardError as e:
+ self.fatal(e, exit_code=FAILURE)
+
+
+# main {{{1
+if __name__ == '__main__':
+ myScript = SpidermonkeyBuild()
+ myScript.run_and_exit()
diff --git a/testing/mozharness/scripts/talos_script.py b/testing/mozharness/scripts/talos_script.py
new file mode 100755
index 000000000..dc4161193
--- /dev/null
+++ b/testing/mozharness/scripts/talos_script.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+"""talos
+
+"""
+
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.mozilla.testing.talos import Talos
+
+if __name__ == '__main__':
+ talos = Talos()
+ talos.run_and_exit()
diff --git a/testing/mozharness/scripts/web_platform_tests.py b/testing/mozharness/scripts/web_platform_tests.py
new file mode 100755
index 000000000..7cd0e3842
--- /dev/null
+++ b/testing/mozharness/scripts/web_platform_tests.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+# ***** BEGIN LICENSE BLOCK *****
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this file,
+# You can obtain one at http://mozilla.org/MPL/2.0/.
+# ***** END LICENSE BLOCK *****
+import copy
+import glob
+import json
+import os
+import sys
+
+# load modules from parent dir
+sys.path.insert(1, os.path.dirname(sys.path[0]))
+
+from mozharness.base.script import PreScriptAction
+from mozharness.base.vcs.vcsbase import MercurialScript
+from mozharness.mozilla.blob_upload import BlobUploadMixin, blobupload_config_options
+from mozharness.mozilla.testing.testbase import TestingMixin, testing_config_options, TOOLTOOL_PLATFORM_DIR
+
+from mozharness.mozilla.structuredlog import StructuredOutputParser
+from mozharness.base.log import INFO
+
+class WebPlatformTest(TestingMixin, MercurialScript, BlobUploadMixin):
+ config_options = [
+ [['--test-type'], {
+ "action": "extend",
+ "dest": "test_type",
+ "help": "Specify the test types to run."}
+ ],
+ [['--e10s'], {
+ "action": "store_true",
+ "dest": "e10s",
+ "default": False,
+ "help": "Run with e10s enabled"}
+ ],
+ [["--total-chunks"], {
+ "action": "store",
+ "dest": "total_chunks",
+ "help": "Number of total chunks"}
+ ],
+ [["--this-chunk"], {
+ "action": "store",
+ "dest": "this_chunk",
+ "help": "Number of this chunk"}
+ ],
+ [["--allow-software-gl-layers"], {
+ "action": "store_true",
+ "dest": "allow_software_gl_layers",
+ "default": False,
+ "help": "Permits a software GL implementation (such as LLVMPipe) to use the GL compositor."}]
+ ] + copy.deepcopy(testing_config_options) + \
+ copy.deepcopy(blobupload_config_options)
+
+ def __init__(self, require_config_file=True):
+ super(WebPlatformTest, self).__init__(
+ config_options=self.config_options,
+ all_actions=[
+ 'clobber',
+ 'read-buildbot-config',
+ 'download-and-extract',
+ 'fetch-geckodriver',
+ 'create-virtualenv',
+ 'pull',
+ 'install',
+ 'run-tests',
+ ],
+ require_config_file=require_config_file,
+ config={'require_test_zip': True})
+
+ # Surely this should be in the superclass
+ c = self.config
+ self.installer_url = c.get('installer_url')
+ self.test_url = c.get('test_url')
+ self.test_packages_url = c.get('test_packages_url')
+ self.installer_path = c.get('installer_path')
+ self.binary_path = c.get('binary_path')
+ self.abs_app_dir = None
+ self.geckodriver_path = None
+
+ def query_abs_app_dir(self):
+ """We can't set this in advance, because OSX install directories
+ change depending on branding and opt/debug.
+ """
+ if self.abs_app_dir:
+ return self.abs_app_dir
+ if not self.binary_path:
+ self.fatal("Can't determine abs_app_dir (binary_path not set!)")
+ self.abs_app_dir = os.path.dirname(self.binary_path)
+ return self.abs_app_dir
+
+ def query_abs_dirs(self):
+ if self.abs_dirs:
+ return self.abs_dirs
+ abs_dirs = super(WebPlatformTest, self).query_abs_dirs()
+
+ dirs = {}
+ dirs['abs_app_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'application')
+ dirs['abs_test_install_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'tests')
+ dirs["abs_wpttest_dir"] = os.path.join(dirs['abs_test_install_dir'], "web-platform")
+ dirs['abs_blob_upload_dir'] = os.path.join(abs_dirs['abs_work_dir'], 'blobber_upload_dir')
+
+ abs_dirs.update(dirs)
+ self.abs_dirs = abs_dirs
+
+ return self.abs_dirs
+
+ @PreScriptAction('create-virtualenv')
+ def _pre_create_virtualenv(self, action):
+ dirs = self.query_abs_dirs()
+
+ requirements = os.path.join(dirs['abs_test_install_dir'],
+ 'config',
+ 'marionette_requirements.txt')
+
+ self.register_virtualenv_module(requirements=[requirements],
+ two_pass=True)
+
+ def _query_cmd(self):
+ if not self.binary_path:
+ self.fatal("Binary path could not be determined")
+ # And exit
+
+ c = self.config
+ dirs = self.query_abs_dirs()
+ abs_app_dir = self.query_abs_app_dir()
+ run_file_name = "runtests.py"
+
+ cmd = [self.query_python_path('python'), '-u']
+ cmd.append(os.path.join(dirs["abs_wpttest_dir"], run_file_name))
+
+ # Make sure that the logging directory exists
+ if self.mkdir_p(dirs["abs_blob_upload_dir"]) == -1:
+ self.fatal("Could not create blobber upload directory")
+ # Exit
+
+ cmd += ["--log-raw=-",
+ "--log-raw=%s" % os.path.join(dirs["abs_blob_upload_dir"],
+ "wpt_raw.log"),
+ "--log-errorsummary=%s" % os.path.join(dirs["abs_blob_upload_dir"],
+ "wpt_errorsummary.log"),
+ "--binary=%s" % self.binary_path,
+ "--symbols-path=%s" % self.query_symbols_url(),
+ "--stackwalk-binary=%s" % self.query_minidump_stackwalk(),
+ "--stackfix-dir=%s" % os.path.join(dirs["abs_test_install_dir"], "bin")]
+
+ for test_type in c.get("test_type", []):
+ cmd.append("--test-type=%s" % test_type)
+
+ if not c["e10s"]:
+ cmd.append("--disable-e10s")
+
+ for opt in ["total_chunks", "this_chunk"]:
+ val = c.get(opt)
+ if val:
+ cmd.append("--%s=%s" % (opt.replace("_", "-"), val))
+
+ if "wdspec" in c.get("test_type", []):
+ assert self.geckodriver_path is not None
+ cmd.append("--webdriver-binary=%s" % self.geckodriver_path)
+
+ options = list(c.get("options", []))
+
+ str_format_values = {
+ 'binary_path': self.binary_path,
+ 'test_path': dirs["abs_wpttest_dir"],
+ 'test_install_path': dirs["abs_test_install_dir"],
+ 'abs_app_dir': abs_app_dir,
+ 'abs_work_dir': dirs["abs_work_dir"]
+ }
+
+ try_options, try_tests = self.try_args("web-platform-tests")
+
+ cmd.extend(self.query_options(options,
+ try_options,
+ str_format_values=str_format_values))
+ cmd.extend(self.query_tests_args(try_tests,
+ str_format_values=str_format_values))
+
+ return cmd
+
+ def download_and_extract(self):
+ super(WebPlatformTest, self).download_and_extract(
+ extract_dirs=["bin/*",
+ "config/*",
+ "mozbase/*",
+ "marionette/*",
+ "tools/wptserve/*",
+ "web-platform/*"],
+ suite_categories=["web-platform"])
+
+ def fetch_geckodriver(self):
+ c = self.config
+ dirs = self.query_abs_dirs()
+
+ platform_name = self.platform_name()
+
+ if "wdspec" not in c.get("test_type", []):
+ return
+
+ if platform_name != "linux64":
+ self.fatal("Don't have a geckodriver for %s" % platform_name)
+
+ tooltool_path = os.path.join(dirs["abs_test_install_dir"],
+ "config",
+ "tooltool-manifests",
+ TOOLTOOL_PLATFORM_DIR[platform_name],
+ "geckodriver.manifest")
+
+ with open(tooltool_path) as f:
+ manifest = json.load(f)
+
+ assert len(manifest) == 1
+ geckodriver_filename = manifest[0]["filename"]
+ assert geckodriver_filename.endswith(".tar.gz")
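+ # The geckodriver manifest is assumed to be a standard single-entry
+ # tooltool manifest; an illustrative (made-up) example:
+ #
+ #   [{"filename": "geckodriver-0.11.1-linux64.tar.gz",
+ #     "size": 2310839,
+ #     "digest": "...",
+ #     "algorithm": "sha512"}]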
+
+ self.tooltool_fetch(
+ manifest=tooltool_path,
+ output_dir=dirs['abs_work_dir'],
+ cache=c.get('tooltool_cache')
+ )
+
+ compressed_path = os.path.join(dirs['abs_work_dir'], geckodriver_filename)
+ tar = self.query_exe('tar', return_type="list")
+ self.run_command(tar + ["xf", compressed_path], cwd=dirs['abs_work_dir'],
+ halt_on_failure=True, fatal_exit_code=3)
+ self.geckodriver_path = os.path.join(dirs['abs_work_dir'], "geckodriver")
+
+ def run_tests(self):
+ dirs = self.query_abs_dirs()
+ cmd = self._query_cmd()
+
+ parser = StructuredOutputParser(config=self.config,
+ log_obj=self.log_obj,
+ log_compact=True)
+
+ env = {'MINIDUMP_SAVE_PATH': dirs['abs_blob_upload_dir']}
+
+ if self.config['allow_software_gl_layers']:
+ env['MOZ_LAYERS_ALLOW_SOFTWARE_GL'] = '1'
+
+ env = self.query_env(partial_env=env, log_level=INFO)
+
+ return_code = self.run_command(cmd,
+ cwd=dirs['abs_work_dir'],
+ output_timeout=1000,
+ output_parser=parser,
+ env=env)
+
+ tbpl_status, log_level = parser.evaluate_parser(return_code)
+
+ self.buildbot_status(tbpl_status, level=log_level)
+
+
+# main {{{1
+if __name__ == '__main__':
+ web_platform_tests = WebPlatformTest()
+ web_platform_tests.run_and_exit()