Diffstat (limited to 'taskcluster/scripts')
-rwxr-xr-x  taskcluster/scripts/builder/build-haz-linux.sh | 89
-rwxr-xr-x  taskcluster/scripts/builder/build-l10n.sh | 98
-rwxr-xr-x  taskcluster/scripts/builder/build-linux.sh | 122
-rwxr-xr-x  taskcluster/scripts/builder/build-sm-mozjs-crate.sh | 18
-rwxr-xr-x  taskcluster/scripts/builder/build-sm-package.sh | 28
-rwxr-xr-x  taskcluster/scripts/builder/build-sm.sh | 20
-rwxr-xr-x  taskcluster/scripts/builder/desktop-setup.sh | 24
-rwxr-xr-x  taskcluster/scripts/builder/get-objdir.py | 20
-rwxr-xr-x  taskcluster/scripts/builder/hazard-analysis.sh | 149
-rwxr-xr-x  taskcluster/scripts/builder/install-packages.sh | 13
-rw-r--r--  taskcluster/scripts/builder/setup-ccache.sh | 9
-rwxr-xr-x  taskcluster/scripts/builder/sm-tooltool-config.sh | 50
-rwxr-xr-x  taskcluster/scripts/copy.sh | 9
-rwxr-xr-x  taskcluster/scripts/misc/build-binutils-linux.sh | 16
-rwxr-xr-x  taskcluster/scripts/misc/build-cctools.sh | 82
-rwxr-xr-x  taskcluster/scripts/misc/build-clang-linux.sh | 30
-rwxr-xr-x  taskcluster/scripts/misc/build-clang-windows.sh | 61
-rwxr-xr-x  taskcluster/scripts/misc/build-gcc-linux.sh | 16
-rwxr-xr-x  taskcluster/scripts/misc/minidump_stackwalk.sh | 125
-rwxr-xr-x  taskcluster/scripts/misc/repackage-jdk-centos.sh | 45
-rw-r--r--  taskcluster/scripts/tester/harness-test-linux.sh | 40
-rwxr-xr-x  taskcluster/scripts/tester/run-wizard | 170
-rw-r--r--  taskcluster/scripts/tester/test-b2g.sh | 118
-rw-r--r--  taskcluster/scripts/tester/test-macosx.sh | 77
-rw-r--r--  taskcluster/scripts/tester/test-ubuntu.sh | 188
25 files changed, 1617 insertions, 0 deletions
diff --git a/taskcluster/scripts/builder/build-haz-linux.sh b/taskcluster/scripts/builder/build-haz-linux.sh
new file mode 100755
index 000000000..1d5ef52ba
--- /dev/null
+++ b/taskcluster/scripts/builder/build-haz-linux.sh
@@ -0,0 +1,89 @@
+#!/bin/bash -ex
+
+function usage() {
+ echo "Usage: $0 [--project <shell|browser>] <workspace-dir> flags..."
+ echo "flags are treated the same way as a commit message would be"
+ echo "(as in, they are scanned for directives just like a try: ... line)"
+}
+
+PROJECT=shell
+WORKSPACE=
+DO_TOOLTOOL=1
+while [[ $# -gt 0 ]]; do
+ if [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then
+ usage
+ exit 0
+ elif [[ "$1" == "--project" ]]; then
+ shift
+ PROJECT="$1"
+ shift
+ elif [[ "$1" == "--no-tooltool" ]]; then
+ shift
+ DO_TOOLTOOL=
+ elif [[ -z "$WORKSPACE" ]]; then
+ WORKSPACE=$( cd "$1" && pwd )
+ shift
+ break
+ fi
+done
+
+SCRIPT_FLAGS="$@"
+
+# Ensure all the scripts in this dir are on the path....
+DIRNAME=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+PATH=$DIRNAME:$PATH
+
+# Use GECKO_BASE_REPOSITORY as a signal for whether we are running in automation.
+export AUTOMATION=${GECKO_BASE_REPOSITORY:+1}
+
+: ${GECKO_DIR:=$WORKSPACE/gecko}
+: ${TOOLTOOL_MANIFEST:=browser/config/tooltool-manifests/linux64/hazard.manifest}
+: ${TOOLTOOL_CACHE:=$WORKSPACE/tt-cache}
+
+if ! [ -d $GECKO_DIR ]; then
+ echo "GECKO_DIR must be set to a directory containing a gecko source checkout" >&2
+ exit 1
+fi
+GECKO_DIR=$( cd "$GECKO_DIR" && pwd )
+
+# Directory to populate with tooltool-installed tools
+export TOOLTOOL_DIR="$WORKSPACE"
+
+# Directory to hold the (useless) object files generated by the analysis.
+export MOZ_OBJDIR="$WORKSPACE/obj-analyzed"
+mkdir -p "$MOZ_OBJDIR"
+
+if [ -n "$DO_TOOLTOOL" ]; then
+ ( cd $TOOLTOOL_DIR; python $GECKO_DIR/testing/docker/recipes/tooltool.py --url https://api.pub.build.mozilla.org/tooltool/ -m $GECKO_DIR/$TOOLTOOL_MANIFEST fetch -c $TOOLTOOL_CACHE )
+fi
+
+export NO_MERCURIAL_SETUP_CHECK=1
+
+if [[ "$PROJECT" = "browser" ]]; then (
+ cd "$WORKSPACE"
+ set "$WORKSPACE"
+ . setup-ccache.sh
+ # Mozbuild config:
+ export MOZBUILD_STATE_PATH=$WORKSPACE/mozbuild/
+ # Create .mozbuild so mach doesn't complain about this
+ mkdir -p $MOZBUILD_STATE_PATH
+) fi
+. hazard-analysis.sh
+
+build_js_shell
+
+# Artifacts folder is outside of the cache.
+mkdir -p $HOME/artifacts/ || true
+
+function onexit () {
+ grab_artifacts "$WORKSPACE/analysis" "$HOME/artifacts"
+}
+
+trap onexit EXIT
+
+configure_analysis "$WORKSPACE/analysis"
+run_analysis "$WORKSPACE/analysis" "$PROJECT"
+
+check_hazards "$WORKSPACE/analysis"
+
+################################### script end ###################################
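
A minimal local-invocation sketch (assumed paths; outside automation the trailing
flags stand in for the commit message and are scanned for directives such as
--dep and --upload-xdbs used by hazard-analysis.sh):

# Run the browser analysis against an existing workspace, skipping tooltool:
./build-haz-linux.sh --project browser --no-tooltool /home/worker/workspace --dep --upload-xdbs
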
diff --git a/taskcluster/scripts/builder/build-l10n.sh b/taskcluster/scripts/builder/build-l10n.sh
new file mode 100755
index 000000000..be16955a5
--- /dev/null
+++ b/taskcluster/scripts/builder/build-l10n.sh
@@ -0,0 +1,98 @@
+#! /bin/bash -vex
+
+set -x -e
+
+echo "running as" $(id)
+
+. /home/worker/scripts/xvfb.sh
+
+####
+# Taskcluster friendly wrapper for performing fx desktop l10n repacks via mozharness.
+# Based on ./build-linux.sh
+####
+
+# Inputs, with defaults
+
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS}
+: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS}
+
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+
+: NEED_XVFB ${NEED_XVFB:=false}
+
+: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+
+set -v
+
+fail() {
+ echo # make sure error message is on a new line
+ echo "[build-l10n.sh:error]" "${@}"
+ exit 1
+}
+
+export MOZ_CRASHREPORTER_NO_REPORT=1
+export MOZ_OBJDIR=obj-firefox
+export TINDERBOX_OUTPUT=1
+
+# Ensure that in tree libraries can be found
+export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/src/obj-firefox:$WORKSPACE/src/gcc/lib64
+
+# test required parameters are supplied
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
+
+cleanup() {
+ local rv=$?
+ cleanup_xvfb
+ exit $rv
+}
+trap cleanup EXIT INT
+
+# run Xvfb in the background, if necessary
+if $NEED_XVFB; then
+ start_xvfb '1024x768x24' 2
+fi
+
+# set up mozharness configuration, via command line, env, etc.
+
+# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the
+# cache. However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be
+# entirely effective.
+export TOOLTOOL_CACHE
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config ${cfg}"
+done
+
+# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions
+# in the mozharness configuration)
+if [ -n "$MOZHARNESS_ACTIONS" ]; then
+ actions=""
+ for action in $MOZHARNESS_ACTIONS; do
+ actions="$actions --$action"
+ done
+fi
+
+# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run
+# e.g. enable-pgo
+if [ -n "$MOZHARNESS_OPTIONS" ]; then
+ options=""
+ for option in $MOZHARNESS_OPTIONS; do
+ options="$options --$option"
+ done
+fi
+
+cd /home/worker
+
+python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} \
+ --disable-mock \
+ --revision ${GECKO_HEAD_REV} \
+ $actions \
+ $options \
+ ${config_cmds} \
+ --log-level=debug \
+ --work-dir=$WORKSPACE/build
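
A worked example of the action/option expansion above (values are illustrative,
not mozharness defaults):

MOZHARNESS_ACTIONS="list-locales setup repack"
MOZHARNESS_OPTIONS="total-chunks=10 this-chunk=1"
# After the loops, the variables appended to the python2.7 invocation are:
#   actions=" --list-locales --setup --repack"
#   options=" --total-chunks=10 --this-chunk=1"
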
diff --git a/taskcluster/scripts/builder/build-linux.sh b/taskcluster/scripts/builder/build-linux.sh
new file mode 100755
index 000000000..8885abdec
--- /dev/null
+++ b/taskcluster/scripts/builder/build-linux.sh
@@ -0,0 +1,122 @@
+#! /bin/bash -vex
+
+set -x -e
+
+echo "running as" $(id)
+
+. /home/worker/scripts/xvfb.sh
+
+####
+# Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
+####
+
+# Inputs, with defaults
+
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+: MOZHARNESS_ACTIONS ${MOZHARNESS_ACTIONS}
+: MOZHARNESS_OPTIONS ${MOZHARNESS_OPTIONS}
+
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+
+: NEED_XVFB ${NEED_XVFB:=false}
+
+: MH_CUSTOM_BUILD_VARIANT_CFG ${MH_CUSTOM_BUILD_VARIANT_CFG}
+: MH_BRANCH ${MH_BRANCH:=mozilla-central}
+: MH_BUILD_POOL ${MH_BUILD_POOL:=staging}
+: MOZ_SCM_LEVEL ${MOZ_SCM_LEVEL:=1}
+
+: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+
+set -v
+
+fail() {
+ echo # make sure error message is on a new line
+ echo "[build-linux.sh:error]" "${@}"
+ exit 1
+}
+
+export MOZ_CRASHREPORTER_NO_REPORT=1
+export MOZ_OBJDIR=obj-firefox
+export TINDERBOX_OUTPUT=1
+
+# use "simple" package names so that they can be hard-coded in the task's
+# extras.locations
+export MOZ_SIMPLE_PACKAGE_NAME=target
+
+# Do not try to upload symbols (see https://bugzilla.mozilla.org/show_bug.cgi?id=1164615)
+export MOZ_AUTOMATION_UPLOAD_SYMBOLS=0
+
+# Ensure that in tree libraries can be found
+export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/src/obj-firefox:$WORKSPACE/src/gcc/lib64
+
+# test required parameters are supplied
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
+
+cleanup() {
+ local rv=$?
+ cleanup_xvfb
+ exit $rv
+}
+trap cleanup EXIT INT
+
+# run Xvfb in the background, if necessary
+if $NEED_XVFB; then
+ start_xvfb '1024x768x24' 2
+fi
+
+# set up mozharness configuration, via command line, env, etc.
+
+debug_flag=""
+if [ 0$DEBUG -ne 0 ]; then
+ debug_flag='--debug'
+fi
+
+custom_build_variant_cfg_flag=""
+if [ -n "${MH_CUSTOM_BUILD_VARIANT_CFG}" ]; then
+ custom_build_variant_cfg_flag="--custom-build-variant-cfg=${MH_CUSTOM_BUILD_VARIANT_CFG}"
+fi
+
+# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the
+# cache. However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be
+# entirely effective.
+export TOOLTOOL_CACHE
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config ${cfg}"
+done
+
+# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions
+# in the mozharness configuration)
+if [ -n "$MOZHARNESS_ACTIONS" ]; then
+ actions=""
+ for action in $MOZHARNESS_ACTIONS; do
+ actions="$actions --$action"
+ done
+fi
+
+# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run
+# e.g. enable-pgo
+if [ -n "$MOZHARNESS_OPTIONS" ]; then
+ options=""
+ for option in $MOZHARNESS_OPTIONS; do
+ options="$options --$option"
+ done
+fi
+
+cd /home/worker
+
+python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
+ $debug_flag \
+ $custom_build_variant_cfg_flag \
+ --disable-mock \
+ $actions \
+ $options \
+ --log-level=debug \
+ --scm-level=$MOZ_SCM_LEVEL \
+ --work-dir=$WORKSPACE/build \
+ --branch=${MH_BRANCH} \
+ --build-pool=${MH_BUILD_POOL}
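
The "0$DEBUG" numeric test above is a guard for when DEBUG is unset or empty;
a quick sketch of how it evaluates:

DEBUG=  ; [ 0$DEBUG -ne 0 ] && echo debug || echo opt   # prints "opt"   (0 -ne 0 is false)
DEBUG=1 ; [ 0$DEBUG -ne 0 ] && echo debug || echo opt   # prints "debug" (01 -ne 0 is true)
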
diff --git a/taskcluster/scripts/builder/build-sm-mozjs-crate.sh b/taskcluster/scripts/builder/build-sm-mozjs-crate.sh
new file mode 100755
index 000000000..09c353084
--- /dev/null
+++ b/taskcluster/scripts/builder/build-sm-mozjs-crate.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+set -xe
+
+source $(dirname $0)/sm-tooltool-config.sh
+
+# Ensure that we have a .config/cargo that points us to our vendored crates
+# rather than to crates.io.
+cd "$SRCDIR/.cargo"
+sed -e "s|@top_srcdir@|$SRCDIR|" < config.in | tee config
+
+cd "$SRCDIR/js/src"
+
+export PATH="$PATH:$TOOLTOOL_CHECKOUT/cargo/bin:$TOOLTOOL_CHECKOUT/rustc/bin"
+export RUST_BACKTRACE=1
+
+cargo build --verbose --frozen --features debugmozjs
+cargo build --verbose --frozen
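
The sed step above fills in the @top_srcdir@ placeholder so cargo resolves the
vendored crates inside the source checkout; a sketch with a hypothetical
config.in line (the real template lives in $SRCDIR/.cargo):

echo 'directory = "@top_srcdir@/third_party/rust"' | sed -e "s|@top_srcdir@|$SRCDIR|"
# -> directory = "/home/worker/workspace/build/src/third_party/rust"   (with the default SRCDIR)
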
diff --git a/taskcluster/scripts/builder/build-sm-package.sh b/taskcluster/scripts/builder/build-sm-package.sh
new file mode 100755
index 000000000..6bb819f26
--- /dev/null
+++ b/taskcluster/scripts/builder/build-sm-package.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+set -xe
+
+source $(dirname $0)/sm-tooltool-config.sh
+
+mkdir -p $UPLOAD_DIR
+
+# Package up the sources into the release tarball.
+AUTOMATION=1 DIST=$UPLOAD_DIR $SRCDIR/js/src/make-source-package.sh
+
+# Extract the tarball into a new directory in the workspace.
+
+PACKAGE_DIR=$WORK/sm-package
+mkdir -p $PACKAGE_DIR
+pushd $PACKAGE_DIR
+
+tar -xjvf $UPLOAD_DIR/mozjs-*.tar.bz2
+
+: ${PYTHON:=python2.7}
+
+# Build the freshly extracted, packaged SpiderMonkey.
+pushd ./mozjs-*/js/src
+AUTOMATION=1 $PYTHON ./devtools/automation/autospider.py --skip-tests=checks $SPIDERMONKEY_VARIANT
+popd
+
+# Copy artifacts for upload by TaskCluster
+cp -rL ./mozjs-*/obj-spider/dist/bin/{js,jsapi-tests,js-gdb.py,libmozjs*} $UPLOAD_DIR
diff --git a/taskcluster/scripts/builder/build-sm.sh b/taskcluster/scripts/builder/build-sm.sh
new file mode 100755
index 000000000..d61a7a81c
--- /dev/null
+++ b/taskcluster/scripts/builder/build-sm.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -x
+
+source $(dirname $0)/sm-tooltool-config.sh
+
+: ${PYTHON:=python2.7}
+
+# Run the script
+export MOZ_UPLOAD_DIR="$UPLOAD_DIR"
+AUTOMATION=1 $PYTHON $SRCDIR/js/src/devtools/automation/autospider.py $SPIDERMONKEY_VARIANT
+BUILD_STATUS=$?
+
+# Ensure upload dir exists
+mkdir -p $UPLOAD_DIR
+
+# Copy artifacts for upload by TaskCluster
+cp -rL $SRCDIR/obj-spider/dist/bin/{js,jsapi-tests,js-gdb.py} $UPLOAD_DIR
+
+exit $BUILD_STATUS
diff --git a/taskcluster/scripts/builder/desktop-setup.sh b/taskcluster/scripts/builder/desktop-setup.sh
new file mode 100755
index 000000000..4b74a1201
--- /dev/null
+++ b/taskcluster/scripts/builder/desktop-setup.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -ex
+
+test $MOZCONFIG # mozconfig is required...
+test -d $1 # workspace must exist at this point...
+WORKSPACE=$( cd "$1" && pwd )
+
+. setup-ccache.sh
+
+# Gecko source:
+export GECKO_DIR=$WORKSPACE/gecko
+# Gaia source:
+export GAIA_DIR=$WORKSPACE/gaia
+# Mozbuild config:
+export MOZBUILD_STATE_PATH=$WORKSPACE/mozbuild/
+
+# Create .mozbuild so mach doesn't complain about this
+mkdir -p $MOZBUILD_STATE_PATH
+
+### Install package dependencies
+install-packages.sh ${TOOLTOOL_DIR:-$GECKO_DIR}
+
+# Ensure object-folder exists
+export MOZ_OBJDIR=$WORKSPACE/object-folder/
+mkdir -p $MOZ_OBJDIR
diff --git a/taskcluster/scripts/builder/get-objdir.py b/taskcluster/scripts/builder/get-objdir.py
new file mode 100755
index 000000000..132e20d4f
--- /dev/null
+++ b/taskcluster/scripts/builder/get-objdir.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python2.7
+
+from __future__ import print_function
+import sys
+import os
+import json
+import subprocess
+from StringIO import StringIO
+
+gecko_dir = sys.argv[1]
+os.chdir(gecko_dir)
+
+result = subprocess.check_output(["./mach", "environment", "--format", "json"])
+environment = json.load(StringIO(result))
+
+topobjdir = environment["mozconfig"]["topobjdir"]
+if topobjdir is None:
+ topobjdir = sys.argv[2]
+
+print(topobjdir)
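
A usage sketch for get-objdir.py (example paths): it prints the mozconfig's
topobjdir, or the second argument when the mozconfig does not set one.

OBJDIR=$(./get-objdir.py /home/worker/workspace/gecko /home/worker/workspace/object-folder)
echo "objdir is $OBJDIR"
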
diff --git a/taskcluster/scripts/builder/hazard-analysis.sh b/taskcluster/scripts/builder/hazard-analysis.sh
new file mode 100755
index 000000000..d3e574742
--- /dev/null
+++ b/taskcluster/scripts/builder/hazard-analysis.sh
@@ -0,0 +1,149 @@
+#!/bin/bash -ex
+
+[ -n "$WORKSPACE" ]
+[ -n "$MOZ_OBJDIR" ]
+[ -n "$GECKO_DIR" ]
+
+HAZARD_SHELL_OBJDIR=$WORKSPACE/obj-haz-shell
+JS_SRCDIR=$GECKO_DIR/js/src
+ANALYSIS_SRCDIR=$JS_SRCDIR/devtools/rootAnalysis
+
+export CC="$TOOLTOOL_DIR/gcc/bin/gcc"
+export CXX="$TOOLTOOL_DIR/gcc/bin/g++"
+
+PYTHON=python2.7
+if ! which $PYTHON; then
+ PYTHON=python
+fi
+
+
+function check_commit_msg () {
+ ( set +e;
+ if [[ -n "$AUTOMATION" ]]; then
+ hg --cwd "$GECKO_DIR" log -r. --template '{desc}\n' | grep -F -q -- "$1"
+ else
+ echo -- "$SCRIPT_FLAGS" | grep -F -q -- "$1"
+ fi
+ )
+}
+
+if check_commit_msg "--dep"; then
+ HAZ_DEP=1
+fi
+
+function build_js_shell () {
+ # Must unset MOZ_OBJDIR and MOZCONFIG here to prevent the build system from
+ # inferring that the analysis output directory is the current objdir. We
+ # need a separate objdir here to build the opt JS shell to use to run the
+ # analysis.
+ (
+ unset MOZ_OBJDIR
+ unset MOZCONFIG
+ ( cd $JS_SRCDIR; autoconf-2.13 )
+ if [[ -z "$HAZ_DEP" ]]; then
+ [ -d $HAZARD_SHELL_OBJDIR ] && rm -rf $HAZARD_SHELL_OBJDIR
+ fi
+ mkdir -p $HAZARD_SHELL_OBJDIR || true
+ cd $HAZARD_SHELL_OBJDIR
+ $JS_SRCDIR/configure --enable-optimize --disable-debug --enable-ctypes --enable-nspr-build --without-intl-api --with-ccache
+ make -j4
+ ) # Restore MOZ_OBJDIR and MOZCONFIG
+}
+
+function configure_analysis () {
+ local analysis_dir
+ analysis_dir="$1"
+
+ if [[ -z "$HAZ_DEP" ]]; then
+ [ -d "$analysis_dir" ] && rm -rf "$analysis_dir"
+ fi
+
+ mkdir -p "$analysis_dir" || true
+ (
+ cd "$analysis_dir"
+ cat > defaults.py <<EOF
+js = "$HAZARD_SHELL_OBJDIR/dist/bin/js"
+analysis_scriptdir = "$ANALYSIS_SRCDIR"
+objdir = "$MOZ_OBJDIR"
+source = "$GECKO_DIR"
+sixgill = "$TOOLTOOL_DIR/sixgill/usr/libexec/sixgill"
+sixgill_bin = "$TOOLTOOL_DIR/sixgill/usr/bin"
+EOF
+
+ cat > run-analysis.sh <<EOF
+#!/bin/sh
+if [ \$# -eq 0 ]; then
+ set gcTypes
+fi
+export ANALYSIS_SCRIPTDIR="$ANALYSIS_SRCDIR"
+exec "$ANALYSIS_SRCDIR/analyze.py" "\$@"
+EOF
+ chmod +x run-analysis.sh
+ )
+}
+
+function run_analysis () {
+ local analysis_dir
+ analysis_dir="$1"
+ local build_type
+ build_type="$2"
+
+ if [[ -z "$HAZ_DEP" ]]; then
+ [ -d $MOZ_OBJDIR ] && rm -rf $MOZ_OBJDIR
+ fi
+
+ (
+ cd "$analysis_dir"
+ $PYTHON "$ANALYSIS_SRCDIR/analyze.py" --buildcommand="$GECKO_DIR/testing/mozharness/scripts/spidermonkey/build.${build_type}"
+ )
+}
+
+function grab_artifacts () {
+ local analysis_dir
+ analysis_dir="$1"
+ local artifacts
+ artifacts="$2"
+
+ (
+ cd "$analysis_dir"
+ ls -lah
+
+ # Do not error out if no files found
+ shopt -s nullglob
+ set +e
+ for f in *.txt *.lst; do
+ gzip -9 -c "$f" > "${artifacts}/$f.gz"
+ done
+
+ # Check whether the user requested .xdb file upload in the top commit comment
+ if check_commit_msg "--upload-xdbs"; then
+ HAZ_UPLOAD_XDBS=1
+ fi
+
+ if [ -n "$HAZ_UPLOAD_XDBS" ]; then
+ for f in *.xdb; do
+ bzip2 -c "$f" > "${artifacts}/$f.bz2"
+ done
+ fi
+ )
+}
+
+function check_hazards () {
+ (
+ set +e
+ NUM_HAZARDS=$(grep -c 'Function.*has unrooted.*live across GC call' "$1"/rootingHazards.txt)
+ NUM_UNSAFE=$(grep -c '^Function.*takes unsafe address of unrooted' "$1"/refs.txt)
+ NUM_UNNECESSARY=$(grep -c '^Function.* has unnecessary root' "$1"/unnecessary.txt)
+
+ set +x
+ echo "TinderboxPrint: rooting hazards<br/>$NUM_HAZARDS"
+ echo "TinderboxPrint: unsafe references to unrooted GC pointers<br/>$NUM_UNSAFE"
+ echo "TinderboxPrint: unnecessary roots<br/>$NUM_UNNECESSARY"
+
+ if [ $NUM_HAZARDS -gt 0 ]; then
+ echo "TEST-UNEXPECTED-FAIL $NUM_HAZARDS hazards detected" >&2
+ echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds'>static rooting hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details"
+ exit 1
+ fi
+ )
+}
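
The commit-message directive handling above reduces to a fixed-string grep; a
minimal sketch of the non-automation branch, where SCRIPT_FLAGS carries the
flags forwarded by build-haz-linux.sh:

SCRIPT_FLAGS="--dep --upload-xdbs"   # example flags
if echo -- "$SCRIPT_FLAGS" | grep -F -q -- "--dep"; then
    echo "incremental analysis: previous objdir and analysis dir are kept"
fi
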
diff --git a/taskcluster/scripts/builder/install-packages.sh b/taskcluster/scripts/builder/install-packages.sh
new file mode 100755
index 000000000..2f5cdf489
--- /dev/null
+++ b/taskcluster/scripts/builder/install-packages.sh
@@ -0,0 +1,13 @@
+#!/bin/bash -vex
+
+gecko_dir=$1
+test -d $gecko_dir
+test -n "$TOOLTOOL_CACHE"
+test -n "$TOOLTOOL_MANIFEST"
+test -n "$TOOLTOOL_REPO"
+test -n "$TOOLTOOL_REV"
+
+tc-vcs checkout $gecko_dir/tooltool $TOOLTOOL_REPO $TOOLTOOL_REPO $TOOLTOOL_REV
+
+(cd $gecko_dir; python $gecko_dir/tooltool/tooltool.py --url https://api.pub.build.mozilla.org/tooltool/ -m $gecko_dir/$TOOLTOOL_MANIFEST fetch -c $TOOLTOOL_CACHE)
+
diff --git a/taskcluster/scripts/builder/setup-ccache.sh b/taskcluster/scripts/builder/setup-ccache.sh
new file mode 100644
index 000000000..3c03b2640
--- /dev/null
+++ b/taskcluster/scripts/builder/setup-ccache.sh
@@ -0,0 +1,9 @@
+#! /bin/bash -ex
+
+test -d $1 # workspace must exist at this point...
+WORKSPACE=$( cd "$1" && pwd )
+
+export CCACHE_DIR=$WORKSPACE/ccache
+
+ccache -M 12G
+ccache -s
diff --git a/taskcluster/scripts/builder/sm-tooltool-config.sh b/taskcluster/scripts/builder/sm-tooltool-config.sh
new file mode 100755
index 000000000..b6a062858
--- /dev/null
+++ b/taskcluster/scripts/builder/sm-tooltool-config.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -xe
+
+: ${TOOLTOOL_SERVER:=https://api.pub.build.mozilla.org/tooltool/}
+: ${SPIDERMONKEY_VARIANT:=plain}
+: ${UPLOAD_DIR:=$HOME/artifacts/}
+: ${WORK:=$HOME/workspace}
+: ${SRCDIR:=$WORK/build/src}
+
+mkdir -p $WORK
+cd $WORK
+
+# Need to install things from tooltool. Figure out what platform to use.
+
+case $(uname -m) in
+ i686 | arm )
+ BITS=32
+ ;;
+ *)
+ BITS=64
+ ;;
+esac
+
+case "$OSTYPE" in
+ darwin*)
+ PLATFORM_OS=macosx
+ ;;
+ linux-gnu)
+ PLATFORM_OS=linux
+ ;;
+ msys)
+ PLATFORM_OS=win
+ ;;
+ *)
+ echo "Unrecognized OSTYPE '$OSTYPE'" >&2
+ PLATFORM_OS=linux
+ ;;
+esac
+
+# Install everything needed for the browser on this platform. Not all of it is
+# necessary for the JS shell, but it's less duplication to share tooltool
+# manifests.
+BROWSER_PLATFORM=$PLATFORM_OS$BITS
+: ${TOOLTOOL_MANIFEST:=browser/config/tooltool-manifests/$BROWSER_PLATFORM/releng.manifest}
+
+: ${TOOLTOOL_CHECKOUT:=$WORK}
+export TOOLTOOL_CHECKOUT
+
+(cd $TOOLTOOL_CHECKOUT && python ${SRCDIR}/testing/docker/recipes/tooltool.py --url $TOOLTOOL_SERVER -m $SRCDIR/$TOOLTOOL_MANIFEST fetch ${TOOLTOOL_CACHE:+ -c $TOOLTOOL_CACHE})
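
A worked example of the platform detection above (assumed host: 64-bit desktop Linux):

uname -m        # x86_64    -> BITS=64
echo "$OSTYPE"  # linux-gnu -> PLATFORM_OS=linux
# BROWSER_PLATFORM=linux64, so the default manifest resolves to
#   browser/config/tooltool-manifests/linux64/releng.manifest
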
diff --git a/taskcluster/scripts/copy.sh b/taskcluster/scripts/copy.sh
new file mode 100755
index 000000000..931145a3b
--- /dev/null
+++ b/taskcluster/scripts/copy.sh
@@ -0,0 +1,9 @@
+#! /bin/bash -ex
+
+# This script copies the contents of the "scripts" folder into a docker
+# container using tar/untar; the container id must be passed as $1 and the destination directory as $2.
+
+DIRNAME=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+docker exec $1 mkdir -p $2
+cd $DIRNAME
+tar -cv * | docker exec -i $1 tar -x -C $2
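
A usage sketch (container id and destination directory are examples):

./copy.sh 4f66a9c8b2d1 /home/worker/scripts
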
diff --git a/taskcluster/scripts/misc/build-binutils-linux.sh b/taskcluster/scripts/misc/build-binutils-linux.sh
new file mode 100755
index 000000000..da0eb2724
--- /dev/null
+++ b/taskcluster/scripts/misc/build-binutils-linux.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for building binutils for Linux.
+
+WORKSPACE=$HOME/workspace
+HOME_DIR=$WORKSPACE/build
+UPLOAD_DIR=$WORKSPACE/artifacts
+
+cd $HOME_DIR/src
+
+build/unix/build-binutils/build-binutils.sh $HOME_DIR
+
+# Put a tarball in the artifacts dir
+mkdir -p $UPLOAD_DIR
+cp $HOME_DIR/binutils.tar.* $UPLOAD_DIR
diff --git a/taskcluster/scripts/misc/build-cctools.sh b/taskcluster/scripts/misc/build-cctools.sh
new file mode 100755
index 000000000..3eea0929d
--- /dev/null
+++ b/taskcluster/scripts/misc/build-cctools.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for building cctools (Apple's binutils) for Linux using
+# crosstool-ng (https://github.com/diorcety/crosstool-ng).
+
+WORKSPACE=$HOME/workspace
+UPLOAD_DIR=$WORKSPACE/artifacts
+
+# Repository info
+: CROSSTOOL_NG_REPOSITORY ${CROSSTOOL_NG_REPOSITORY:=https://github.com/diorcety/crosstool-ng}
+: CROSSTOOL_NG_REV ${CROSSTOOL_NG_REV:=master}
+
+# hacky
+ln -s `which gcc` ~/bin/x86_64-linux-gnu-gcc
+export PATH=$PATH:~/bin
+
+# Set some crosstools-ng directories
+CT_TOP_DIR=$WORKSPACE/crosstool-ng-build
+CT_PREFIX_DIR=$WORKSPACE/cctools
+CT_SRC_DIR=$CT_TOP_DIR/src
+CT_TARBALLS_DIR=$CT_TOP_DIR
+CT_WORK_DIR=$CT_SRC_DIR
+CT_LIB_DIR=$WORKSPACE/crosstool-ng
+CT_BUILD_DIR=$CT_TOP_DIR/build
+CT_LLVM_DIR=$WORKSPACE/clang
+CT_BUILDTOOLS_PREFIX_DIR=$CT_PREFIX_DIR
+
+# Create our directories
+rm -rf $CT_TOP_DIR
+mkdir $CT_TOP_DIR
+rm -rf $CT_PREFIX_DIR
+mkdir $CT_PREFIX_DIR
+mkdir -p $CT_SRC_DIR
+
+# Clone the crosstool-ng repo
+tc-vcs checkout $CT_LIB_DIR $CROSSTOOL_NG_REPOSITORY $CROSSTOOL_NG_REPOSITORY $CROSSTOOL_NG_REV
+
+# Fetch clang from tooltool
+cd $WORKSPACE
+wget -O tooltool.py https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py
+chmod +x tooltool.py
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+export TOOLTOOL_CACHE
+
+wget ${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/browser/config/tooltool-manifests/linux64/clang.manifest
+
+python tooltool.py -v --manifest=clang.manifest fetch
+
+# Copy clang into the crosstools-ng srcdir
+cp -Rp $CT_LLVM_DIR $CT_SRC_DIR
+
+# Configure crosstools-ng
+sed=sed
+CT_CONNECT_TIMEOUT=5
+CT_BINUTILS_VERSION=809
+CT_PATCH_ORDER=bundled
+CT_BUILD=x86_64-linux-gnu
+CT_HOST=x86_64-linux-gnu
+CT_TARGET=x86_64-apple-darwin10
+CT_LLVM_FULLNAME=clang
+
+cd $CT_TOP_DIR
+
+# gets a bit too verbose here
+set +x
+
+. $CT_LIB_DIR/scripts/functions
+. $CT_LIB_DIR/scripts/build/binutils/cctools.sh
+
+# Build cctools
+do_binutils_get
+do_binutils_extract
+do_binutils_for_host
+
+set -x
+
+strip $CT_PREFIX_DIR/bin/*
+
+# Put a tarball in the artifacts dir
+mkdir -p $UPLOAD_DIR
+tar czf $UPLOAD_DIR/cctools.tar.gz -C $WORKSPACE `basename $CT_PREFIX_DIR`
diff --git a/taskcluster/scripts/misc/build-clang-linux.sh b/taskcluster/scripts/misc/build-clang-linux.sh
new file mode 100755
index 000000000..e1c6f2f0d
--- /dev/null
+++ b/taskcluster/scripts/misc/build-clang-linux.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+set -x -e -v
+
+# This script is for building clang for Linux.
+
+WORKSPACE=$HOME/workspace
+HOME_DIR=$WORKSPACE/build
+UPLOAD_DIR=$WORKSPACE/artifacts
+
+# Fetch our toolchain from tooltool
+cd $HOME_DIR
+wget -O tooltool.py https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py
+chmod +x tooltool.py
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+export TOOLTOOL_CACHE
+cd src
+$HOME_DIR/tooltool.py -m browser/config/tooltool-manifests/linux64/releng.manifest fetch
+
+# gets a bit too verbose here
+set +x
+
+cd build/build-clang
+# |mach python| sets up a virtualenv for us!
+../../mach python ./build-clang.py -c clang-static-analysis-linux64.json
+
+set -x
+
+# Put a tarball in the artifacts dir
+mkdir -p $UPLOAD_DIR
+cp clang.tar.* $UPLOAD_DIR
diff --git a/taskcluster/scripts/misc/build-clang-windows.sh b/taskcluster/scripts/misc/build-clang-windows.sh
new file mode 100755
index 000000000..6d2acaa03
--- /dev/null
+++ b/taskcluster/scripts/misc/build-clang-windows.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+set -x -e -v
+
+# This script is for building clang-cl on Windows.
+
+# Fetch our toolchain from tooltool.
+wget -O tooltool.py ${TOOLTOOL_REPO}/raw/${TOOLTOOL_REV}/tooltool.py
+chmod +x tooltool.py
+: TOOLTOOL_CACHE ${TOOLTOOL_CACHE:=/home/worker/tooltool-cache}
+export TOOLTOOL_CACHE
+
+TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
+if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
+ echo cannot find ${TOOLTOOL_AUTH_FILE}
+ exit 1
+fi
+
+TOOLTOOL_MANIFEST=build/src/browser/config/tooltool-manifests/win32/build-clang-cl.manifest
+./tooltool.py --authentication-file="${TOOLTOOL_AUTH_FILE}" -m "${TOOLTOOL_MANIFEST}" fetch
+
+# Set up all the Visual Studio paths.
+MSVC_DIR=vs2015u3
+VSWINPATH="$(cd ${MSVC_DIR} && pwd)"
+
+echo vswinpath ${VSWINPATH}
+
+export WINDOWSSDKDIR="${VSWINPATH}/SDK"
+export WIN32_REDIST_DIR="${VSWINPATH}/VC/redist/x86/Microsoft.VC140.CRT"
+export WIN_UCRT_REDIST_DIR="${VSWINPATH}/SDK/Redist/ucrt/DLLs/x86"
+
+export PATH="${VSWINPATH}/VC/bin/amd64_x86:${VSWINPATH}/VC/bin/amd64:${VSWINPATH}/VC/bin:${VSWINPATH}/SDK/bin/x86:${VSWINPATH}/SDK/bin/x64:${VSWINPATH}/DIA SDK/bin:${PATH}"
+export PATH="${VSWINPATH}/VC/redist/x86/Microsoft.VC140.CRT:${VSWINPATH}/VC/redist/x64/Microsoft.VC140.CRT:${VSWINPATH}/SDK/Redist/ucrt/DLLs/x86:${VSWINPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
+
+export INCLUDE="${VSWINPATH}/VC/include:${VSWINPATH}/VC/atlmfc/include:${VSWINPATH}/SDK/Include/10.0.14393.0/ucrt:${VSWINPATH}/SDK/Include/10.0.14393.0/shared:${VSWINPATH}/SDK/Include/10.0.14393.0/um:${VSWINPATH}/SDK/Include/10.0.14393.0/winrt:${VSWINPATH}/DIA SDK/include"
+export LIB="${VSWINPATH}/VC/lib:${VSWINPATH}/VC/atlmfc/lib:${VSWINPATH}/SDK/lib/10.0.14393.0/ucrt/x86:${VSWINPATH}/SDK/lib/10.0.14393.0/um/x86:${VSWINPATH}/DIA SDK/lib"
+
+export PATH="$(cd svn && pwd)/bin:${PATH}"
+export PATH="$(cd cmake && pwd)/bin:${PATH}"
+export PATH="$(cd ninja && pwd)/bin:${PATH}"
+
+# We use |mach python| to set up a virtualenv automatically for us. We create
+# a dummy mozconfig, because the default machinery for config.guess-choosing
+# of the objdir doesn't work very well.
+MOZCONFIG="$(pwd)/mozconfig"
+cat > ${MOZCONFIG} <<EOF
+mk_add_options MOZ_OBJDIR=$(pwd)/objdir
+EOF
+
+# gets a bit too verbose here
+set +x
+
+BUILD_CLANG_DIR=build/src/build/build-clang
+MOZCONFIG=${MOZCONFIG} build/src/mach python ${BUILD_CLANG_DIR}/build-clang.py -c ${BUILD_CLANG_DIR}/clang-static-analysis-win32.json
+
+set -x
+
+# Put a tarball in the artifacts dir
+UPLOAD_PATH=public/build
+mkdir -p ${UPLOAD_PATH}
+cp clang.tar.* ${UPLOAD_PATH}
diff --git a/taskcluster/scripts/misc/build-gcc-linux.sh b/taskcluster/scripts/misc/build-gcc-linux.sh
new file mode 100755
index 000000000..7621ec4aa
--- /dev/null
+++ b/taskcluster/scripts/misc/build-gcc-linux.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+
+# This script is for building GCC for Linux.
+
+WORKSPACE=$HOME/workspace
+HOME_DIR=$WORKSPACE/build
+UPLOAD_DIR=$WORKSPACE/artifacts
+
+cd $HOME_DIR/src
+
+build/unix/build-gcc/build-gcc.sh $HOME_DIR
+
+# Put a tarball in the artifacts dir
+mkdir -p $UPLOAD_DIR
+cp $HOME_DIR/gcc.tar.* $UPLOAD_DIR
diff --git a/taskcluster/scripts/misc/minidump_stackwalk.sh b/taskcluster/scripts/misc/minidump_stackwalk.sh
new file mode 100755
index 000000000..de4fd748c
--- /dev/null
+++ b/taskcluster/scripts/misc/minidump_stackwalk.sh
@@ -0,0 +1,125 @@
+#!/bin/bash
+#
+# This script builds minidump_stackwalk binaries from the Google Breakpad
+# source for all of the operating systems that we run Firefox tests on:
+# Linux x86, Linux x86-64, Windows x86, OS X x86-64.
+#
+# It expects to be run in the luser/breakpad-builder:0.7 Docker image and
+# needs access to the relengapiproxy to download internal tooltool files.
+
+set -v -e -x
+
+# This is a pain to support properly with gclient.
+#: BREAKPAD_REPO ${BREAKPAD_REPO:=https://google-breakpad.googlecode.com/svn/trunk/}
+: BREAKPAD_REV "${BREAKPAD_REV:=master}"
+: STACKWALK_HTTP_REPO "${STACKWALK_HTTP_REPO:=https://hg.mozilla.org/users/tmielczarek_mozilla.com/stackwalk-http}"
+: STACKWALK_HTTP_REV "${STACKWALK_HTTP_REV:=default}"
+
+ncpu=$(getconf _NPROCESSORS_ONLN)
+
+function build()
+{
+ cd /tmp
+ local platform=$1
+ local strip_prefix=$2
+ local configure_args=$3
+ local make_args=$4
+ local objdir=/tmp/obj-breakpad-$platform
+ local ext=
+ if test "$platform" = "win32"; then
+ ext=.exe
+ fi
+ rm -rf "$objdir"
+ mkdir "$objdir"
+ # First, build Breakpad
+ cd "$objdir"
+ # shellcheck disable=SC2086
+ CFLAGS="-O2 $CFLAGS" CXXFLAGS="-O2 $CXXFLAGS" /tmp/breakpad/src/configure --disable-tools $configure_args
+ # shellcheck disable=SC2086
+ make -j$ncpu $make_args src/libbreakpad.a src/third_party/libdisasm/libdisasm.a src/processor/stackwalk_common.o
+ # Second, build stackwalk-http
+ make -f /tmp/stackwalk-http/Makefile BREAKPAD_SRCDIR=/tmp/breakpad/src "BREAKPAD_OBJDIR=$(pwd)" "OS=$platform" "-j$ncpu"
+ "${strip_prefix}strip" "stackwalk${ext}"
+ cp "stackwalk${ext}" "/tmp/stackwalker/${platform}-minidump_stackwalk${ext}"
+}
+
+function linux64()
+{
+ export LDFLAGS="-static-libgcc -static-libstdc++"
+ build linux64
+ unset LDFLAGS
+}
+
+function linux32()
+{
+ export LDFLAGS="-static-libgcc -static-libstdc++ -L/tmp/libcurl-i386/lib"
+ export CFLAGS="-m32 -I/tmp/libcurl-i386/include"
+ export CXXFLAGS="-m32 -I/tmp/libcurl-i386/include"
+ build linux32 "" "--enable-m32"
+ unset LDFLAGS CFLAGS CXXFLAGS
+}
+
+function macosx64()
+{
+ cd /tmp
+ if ! test -d MacOSX10.7.sdk; then
+ python tooltool.py -v --manifest=macosx-sdk.manifest --url=http://relengapi/tooltool/ fetch
+ fi
+ export MACOSX_SDK=/tmp/MacOSX10.7.sdk
+ export CCTOOLS=/tmp/cctools
+ local FLAGS="-stdlib=libc++ -target x86_64-apple-darwin10 -mlinker-version=136 -B /tmp/cctools/bin -isysroot ${MACOSX_SDK} -mmacosx-version-min=10.7"
+ export CC="clang $FLAGS"
+ export CXX="clang++ $FLAGS -std=c++11"
+ local old_path="$PATH"
+ export PATH="/tmp/clang/bin:/tmp/cctools/bin/:$PATH"
+ export LD_LIBRARY_PATH=/usr/lib/llvm-3.6/lib/
+
+ build macosx64 "/tmp/cctools/bin/x86_64-apple-darwin10-" "--host=x86_64-apple-darwin10" "AR=/tmp/cctools/bin/x86_64-apple-darwin10-ar"
+
+ unset CC CXX LD_LIBRARY_PATH MACOSX_SDK CCTOOLS
+ export PATH="$old_path"
+}
+
+function win32()
+{
+ export LDFLAGS="-static-libgcc -static-libstdc++"
+ export CFLAGS="-D__USE_MINGW_ANSI_STDIO=1"
+ export CXXFLAGS="-D__USE_MINGW_ANSI_STDIO=1"
+ export ZLIB_DIR=/tmp/zlib-mingw
+ build win32 "i686-w64-mingw32-" "--host=i686-w64-mingw32"
+ unset LDFLAGS CFLAGS CXXFLAGS ZLIB_DIR
+}
+
+cd /tmp
+if ! test -d depot_tools; then
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+else
+ (cd depot_tools; git pull origin master)
+fi
+export PATH=$(pwd)/depot_tools:"$PATH"
+if ! test -d breakpad; then
+ mkdir breakpad
+ pushd breakpad
+ fetch breakpad
+ popd
+else
+ pushd breakpad/src
+ git pull origin master
+ popd
+fi
+pushd breakpad/src
+git checkout "${BREAKPAD_REV}"
+gclient sync
+popd
+
+(cd breakpad/src; git rev-parse master)
+if ! test -d stackwalk-http; then
+ hg clone -u "$STACKWALK_HTTP_REV" "$STACKWALK_HTTP_REPO"
+else
+ (cd stackwalk-http && hg pull "$STACKWALK_HTTP_REPO" && hg up "$STACKWALK_HTTP_REV")
+fi
+mkdir -p stackwalker
+linux64
+linux32
+macosx64
+win32
diff --git a/taskcluster/scripts/misc/repackage-jdk-centos.sh b/taskcluster/scripts/misc/repackage-jdk-centos.sh
new file mode 100755
index 000000000..2c952602b
--- /dev/null
+++ b/taskcluster/scripts/misc/repackage-jdk-centos.sh
@@ -0,0 +1,45 @@
+#! /bin/bash
+
+set -e -x
+
+mkdir -p artifacts
+pushd build
+
+rm -rf root && mkdir root && cd root
+
+# change these variables when updating java version
+mirror_url_base="http://mirror.centos.org/centos/6.7/updates/x86_64/Packages"
+openjdk=java-1.7.0-openjdk-1.7.0.85-2.6.1.3.el6_7.x86_64.rpm
+openjdk_devel=java-1.7.0-openjdk-devel-1.7.0.85-2.6.1.3.el6_7.x86_64.rpm
+jvm_openjdk_dir=java-1.7.0-openjdk-1.7.0.85.x86_64
+
+# grab the rpm and unpack it
+wget ${mirror_url_base}/${openjdk}
+wget ${mirror_url_base}/${openjdk_devel}
+rpm2cpio $openjdk | cpio -ivd
+rpm2cpio $openjdk_devel | cpio -ivd
+
+cd usr/lib/jvm
+mv $jvm_openjdk_dir java_home
+
+# cacerts is a relative symlink, which doesn't work when we repackage. Make it
+# absolute. We could use tar's --dereference option, but there's a subtle
+# difference between making the symlink absolute and using --dereference.
+# Making the symlink absolute lets the consuming system set the cacerts; using
+# --dereference takes the producing system's cacerts and sets them in stone. We
+# prefer the flexibility of the former.
+rm java_home/jre/lib/security/cacerts
+ln -s /etc/pki/java/cacerts java_home/jre/lib/security/cacerts
+
+# document version this is based on
+echo "Built from ${mirror_url_Base}
+ ${openjdk}
+ ${openjdk_devel}
+
+Run through rpm2cpio | cpio, and /usr/lib/jvm/${jvm_openjdk_dir} renamed to 'java_home'." > java_home/VERSION
+
+# tarball the unpacked rpm and put it in the taskcluster upload artifacts dir
+tar -Jvcf java_home-${jvm_openjdk_dir}.tar.xz java_home
+popd
+
+mv build/root/usr/lib/jvm/java_home-${jvm_openjdk_dir}.tar.xz artifacts
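
A sketch of the cacerts behaviour described above, assuming the tarball has been
unpacked on a consuming machine:

readlink java_home/jre/lib/security/cacerts   # -> /etc/pki/java/cacerts, resolved on the consumer
# tar --dereference would instead have copied the producer's cacerts into the archive.
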
diff --git a/taskcluster/scripts/tester/harness-test-linux.sh b/taskcluster/scripts/tester/harness-test-linux.sh
new file mode 100644
index 000000000..b38ffa124
--- /dev/null
+++ b/taskcluster/scripts/tester/harness-test-linux.sh
@@ -0,0 +1,40 @@
+#! /bin/bash -vex
+
+set -x -e
+
+echo "running as" $(id)
+
+####
+# Taskcluster friendly wrapper for running a script in
+# testing/mozharness/scripts in a source checkout (no build).
+# Example use: Python-only harness unit tests
+####
+
+: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: SRC_ROOT ${SRC_ROOT:=$WORKSPACE/build/src}
+# These paths should be relative to $SRC_ROOT
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+: mozharness args "${@}"
+
+set -v
+cd $WORKSPACE
+
+fail() {
+ echo # make sure error message is on a new line
+ echo "[harness-test-linux.sh:error]" "${@}"
+ exit 1
+}
+
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config-file ${SRC_ROOT}/${cfg}"
+done
+
+python2.7 $SRC_ROOT/${MOZHARNESS_SCRIPT} ${config_cmds} "${@}"
+
+
+
diff --git a/taskcluster/scripts/tester/run-wizard b/taskcluster/scripts/tester/run-wizard
new file mode 100755
index 000000000..5dafb0b62
--- /dev/null
+++ b/taskcluster/scripts/tester/run-wizard
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from __future__ import print_function, unicode_literals
+
+import datetime
+import os
+import subprocess
+import sys
+import time
+from textwrap import wrap
+
+here = os.path.dirname(os.path.abspath(__file__))
+
+
+def call(cmd, **kwargs):
+ print(" ".join(cmd))
+ return subprocess.call(cmd, **kwargs)
+
+
+def wait_for_run_mozharness(timeout=30):
+ starttime = datetime.datetime.now()
+ while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
+ if os.path.isfile(os.path.join(here, 'run-mozharness')):
+ break
+ time.sleep(0.2)
+ else:
+ print("Timed out after %d seconds waiting for the 'run-mozharness' binary" % timeout)
+ return 1
+
+
+def resume():
+ wait_for_run_mozharness()
+ call(['run-mozharness'])
+
+
+def setup():
+ """Run the mozharness script without the 'run-tests' action.
+
+ This will do all the necessary setup steps like creating a virtualenv and
+ downloading the tests and firefox binary. But it stops before running the
+ tests.
+ """
+ wait_for_run_mozharness()
+ status = call(['run-mozharness', '--no-run-tests'])
+
+ if status:
+ # something went wrong
+ return status
+
+ build_dir = os.path.expanduser(os.path.join('~', 'workspace', 'build'))
+ mach_src = os.path.join(build_dir, 'tests', 'mach')
+ mach_dest = os.path.expanduser(os.path.join('~', 'bin', 'mach'))
+
+ if os.path.exists(mach_dest):
+ os.remove(mach_dest)
+ os.symlink(mach_src, mach_dest)
+
+ activate = os.path.join(build_dir, 'venv', 'bin', 'activate')
+ if os.path.isfile(activate):
+ # TODO Support other shells
+ bashrc = os.path.expanduser(os.path.join('~', '.bashrc'))
+ with open(bashrc, 'ab') as f:
+ f.write(". {}".format(activate))
+
+ print("""
+Mozharness has finished downloading the build and tests to:
+{}
+
+A limited mach environment has also been set up and added to the $PATH, but
+it may be missing the command you need. To see a list of commands, run:
+ $ mach help
+""".lstrip().format(build_dir))
+
+
+def clone():
+ """Clone the correct gecko repository and update to the proper revision."""
+ base_repo = os.environ['GECKO_HEAD_REPOSITORY']
+ dest = os.path.expanduser(os.path.join('~', 'gecko'))
+
+ # Specify method to checkout a revision. This defaults to revisions as
+ # SHA-1 strings, but also supports symbolic revisions like `tip` via the
+ # branch flag.
+ if os.environ.get('GECKO_HEAD_REV'):
+ revision_flag = b'--revision'
+ revision = os.environ['GECKO_HEAD_REV']
+ elif os.environ.get('GECKO_HEAD_REF'):
+ revision_flag = b'--branch'
+ revision = os.environ['GECKO_HEAD_REF']
+ else:
+ print('revision is not specified for checkout')
+ return 1
+
+ # TODO Bug 1301382 - pin hg.mozilla.org fingerprint.
+ call([
+ b'/usr/bin/hg', b'robustcheckout',
+ b'--sharebase', os.environ['HG_STORE_PATH'],
+ b'--purge',
+ b'--upstream', b'https://hg.mozilla.org/mozilla-unified',
+ revision_flag, revision,
+ base_repo, dest
+ ])
+ print("Finished cloning to {} at revision {}.".format(dest, revision))
+
+
+def exit():
+ pass
+
+
+OPTIONS = [
+ ('Resume task', resume,
+ "Resume the original task without modification. This can be useful for "
+ "passively monitoring it from another shell."),
+ ('Setup task', setup,
+ "Setup the task (download the application and tests) but don't run the "
+ "tests just yet. The tests can be run with a custom configuration later. "
+ "This will provide a mach environment (experimental)."),
+ ('Clone gecko', clone,
+ "Perform a clone of gecko using the task's repo and update it to the "
+ "task's revision."),
+ ('Exit', exit, "Exit this wizard and return to the shell.")
+]
+
+
+def _fmt_options():
+ max_line_len = 60
+ max_name_len = max(len(o[0]) for o in OPTIONS)
+
+ # TODO Pad will be off if there are more than 9 options.
+ pad = ' ' * (max_name_len+6)
+
+ msg = []
+ for i, (name, _, desc) in enumerate(OPTIONS):
+ desc = wrap(desc, width=max_line_len)
+ desc = [desc[0]] + [pad + l for l in desc[1:]]
+
+ optstr = '{}) {} - {}\n'.format(
+ i+1, name.ljust(max_name_len), '\n'.join(desc))
+ msg.append(optstr)
+ msg.append("Select one of the above options: ")
+ return '\n'.join(msg)
+
+
+def wizard():
+ print("This wizard can help you get started with some common debugging "
+ "workflows.\nWhat would you like to do?\n")
+ print(_fmt_options(), end="")
+ choice = None
+ while True:
+ choice = raw_input().decode('utf8')
+ try:
+ choice = int(choice)-1
+ if 0 <= choice < len(OPTIONS):
+ break
+ except ValueError:
+ pass
+
+ print("Must provide an integer from 1-{}:".format(len(OPTIONS)))
+
+ func = OPTIONS[choice][1]
+ ret = func()
+
+ print("Use the 'run-wizard' command to start this wizard again.")
+ return ret
+
+
+if __name__ == '__main__':
+ sys.exit(wizard())
diff --git a/taskcluster/scripts/tester/test-b2g.sh b/taskcluster/scripts/tester/test-b2g.sh
new file mode 100644
index 000000000..43a54f93a
--- /dev/null
+++ b/taskcluster/scripts/tester/test-b2g.sh
@@ -0,0 +1,118 @@
+#! /bin/bash -xe
+
+set -x -e
+
+echo "running as" $(id)
+
+####
+# Taskcluster friendly wrapper for performing fx desktop tests via mozharness.
+####
+
+# Inputs, with defaults
+
+: MOZHARNESS_URL ${MOZHARNESS_URL}
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+: NEED_XVFB ${NEED_XVFB:=true}
+: NEED_PULSEAUDIO ${NEED_PULSEAUDIO:=false}
+: NEED_PULL_GAIA ${NEED_PULL_GAIA:=false}
+: SKIP_MOZHARNESS_RUN ${SKIP_MOZHARNESS_RUN:=false}
+: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: mozharness args "${@}"
+
+set -v
+cd $WORKSPACE
+
+# test required parameters are supplied
+if [[ -z ${MOZHARNESS_URL} ]]; then exit 1; fi
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then exit 1; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then exit 1; fi
+
+mkdir -p ~/artifacts/public
+
+cleanup() {
+ if [ -n "$xvfb_pid" ]; then
+ kill $xvfb_pid || true
+ fi
+}
+trap cleanup EXIT INT
+
+# Unzip the mozharness ZIP file created by the build task
+curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL
+rm -rf mozharness
+unzip -q mozharness.zip
+rm mozharness.zip
+
+if ! [ -d mozharness ]; then
+ echo "mozharness zip did not contain mozharness/"
+ exit 1
+fi
+
+# start up the pulseaudio daemon. Note that it's important this occur
+# before the Xvfb startup.
+if $NEED_PULSEAUDIO; then
+ pulseaudio --fail --daemonize --start
+ pactl load-module module-null-sink
+fi
+
+# run Xvfb in the background, if necessary
+if $NEED_XVFB; then
+ Xvfb :0 -nolisten tcp -screen 0 1600x1200x24 \
+ > ~/artifacts/public/xvfb.log 2>&1 &
+ export DISPLAY=:0
+ xvfb_pid=$!
+ # Only error code 255 matters, because it signifies that no
+ # display could be opened. As long as we can open the display
+ # tests should work. We'll retry a few times with a sleep before
+ # failing.
+ retry_count=0
+ max_retries=2
+ xvfb_test=0
+ until [ $retry_count -gt $max_retries ]; do
+ xvinfo || xvfb_test=$?
+ if [ $xvfb_test != 255 ]; then
+ retry_count=$(($max_retries + 1))
+ else
+ retry_count=$(($retry_count + 1))
+ echo "Failed to start Xvfb, retry: $retry_count"
+ sleep 2
+ fi done
+ if [ $xvfb_test == 255 ]; then exit 255; fi
+fi
+
+gaia_cmds=""
+if $NEED_PULL_GAIA; then
+ # test required parameters are supplied
+ if [[ -z ${GAIA_BASE_REPOSITORY} ]]; then exit 1; fi
+ if [[ -z ${GAIA_HEAD_REPOSITORY} ]]; then exit 1; fi
+ if [[ -z ${GAIA_REV} ]]; then exit 1; fi
+ if [[ -z ${GAIA_REF} ]]; then exit 1; fi
+
+ tc-vcs checkout \
+ ${WORKSPACE}/gaia \
+ ${GAIA_BASE_REPOSITORY} \
+ ${GAIA_HEAD_REPOSITORY} \
+ ${GAIA_REV} \
+ ${GAIA_REF}
+
+ gaia_cmds="--gaia-dir=${WORKSPACE}"
+fi
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config-file ${cfg}"
+done
+
+if [ ${SKIP_MOZHARNESS_RUN} == true ]; then
+ # Skipping Mozharness allows the developer to start the window manager
+ # properly and to change how Mozharness is executed without
+ # exiting the container.
+ echo "We skipped running Mozharness."
+ echo "Make sure you export DISPLAY=:0 before calling Mozharness."
+ echo "Don't forget to call it with 'sudo -E -u worker'."
+else
+ # run the given mozharness script and configs, but pass the rest of the
+ # arguments in from our own invocation
+ python2.7 $WORKSPACE/${MOZHARNESS_SCRIPT} ${config_cmds} ${gaia_cmds} "${@}"
+fi
diff --git a/taskcluster/scripts/tester/test-macosx.sh b/taskcluster/scripts/tester/test-macosx.sh
new file mode 100644
index 000000000..8c2b758fb
--- /dev/null
+++ b/taskcluster/scripts/tester/test-macosx.sh
@@ -0,0 +1,77 @@
+#! /bin/bash -xe
+
+set -x -e
+
+echo "running as" $(id)
+
+####
+# Taskcluster friendly wrapper for performing fx Mac OSX tests via mozharness.
+####
+
+# Inputs, with defaults
+
+: MOZHARNESS_URL ${MOZHARNESS_URL}
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+
+WORKSPACE=$HOME
+cd $WORKSPACE
+
+rm -rf artifacts
+mkdir artifacts
+
+# test required parameters are supplied
+if [[ -z ${MOZHARNESS_URL} ]]; then fail "MOZHARNESS_URL is not set"; fi
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
+
+# Download mozharness with exponential backoff
+# curl already applies exponential backoff, but not for all
+# failed cases, apparently, as we keep getting failed downloads
+# with 404 code.
+download_mozharness() {
+ local max_attempts=10
+ local timeout=1
+ local attempt=0
+
+ echo "Downloading mozharness"
+
+ while [[ $attempt < $max_attempts ]]; do
+ if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+ rm -rf mozharness
+ if unzip -q mozharness.zip; then
+ return 0
+ fi
+ echo "error unzipping mozharness.zip" >&2
+ else
+ echo "failed to download mozharness zip" >&2
+ fi
+ echo "Download failed, retrying in $timeout seconds..." >&2
+ sleep $timeout
+ timeout=$((timeout*2))
+ attempt=$((attempt+1))
+ done
+
+ fail "Failed to download and unzip mozharness"
+}
+
+download_mozharness
+rm mozharness.zip
+
+# For telemetry purposes, the build process wants information about the
+# source it is running; tc-vcs obscures this a little, but we can provide
+# it directly.
+export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
+export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config-file ${cfg}"
+done
+
+rm -rf build logs properties target.dmg
+
+# run the given mozharness script and configs, but pass the rest of the
+# arguments in from our own invocation
+python2.7 $WORKSPACE/mozharness/scripts/${MOZHARNESS_SCRIPT} ${config_cmds} "${@}"
diff --git a/taskcluster/scripts/tester/test-ubuntu.sh b/taskcluster/scripts/tester/test-ubuntu.sh
new file mode 100644
index 000000000..0c2ccc702
--- /dev/null
+++ b/taskcluster/scripts/tester/test-ubuntu.sh
@@ -0,0 +1,188 @@
+#! /bin/bash -xe
+
+set -x -e
+
+echo "running as" $(id)
+
+# Detect release version.
+. /etc/lsb-release
+if [ "${DISTRIB_RELEASE}" == "12.04" ]; then
+ UBUNTU_1204=1
+elif [ "${DISTRIB_RELEASE}" == "16.04" ]; then
+ UBUNTU_1604=1
+fi
+
+. /home/worker/scripts/xvfb.sh
+
+####
+# Taskcluster friendly wrapper for performing fx desktop tests via mozharness.
+####
+
+# Inputs, with defaults
+
+: MOZHARNESS_PATH ${MOZHARNESS_PATH}
+: MOZHARNESS_URL ${MOZHARNESS_URL}
+: MOZHARNESS_SCRIPT ${MOZHARNESS_SCRIPT}
+: MOZHARNESS_CONFIG ${MOZHARNESS_CONFIG}
+: NEED_XVFB ${NEED_XVFB:=true}
+: NEED_WINDOW_MANAGER ${NEED_WINDOW_MANAGER:=false}
+: NEED_PULSEAUDIO ${NEED_PULSEAUDIO:=false}
+: START_VNC ${START_VNC:=false}
+: TASKCLUSTER_INTERACTIVE ${TASKCLUSTER_INTERACTIVE:=false}
+: WORKSPACE ${WORKSPACE:=/home/worker/workspace}
+: mozharness args "${@}"
+
+set -v
+cd $WORKSPACE
+
+fail() {
+ echo # make sure error message is on a new line
+ echo "[test-linux.sh:error]" "${@}"
+ exit 1
+}
+
+maybe_start_pulse() {
+ if $NEED_PULSEAUDIO; then
+ pulseaudio --fail --daemonize --start
+ pactl load-module module-null-sink
+ fi
+}
+
+# test required parameters are supplied
+if [ -z "${MOZHARNESS_PATH}" -a -z "${MOZHARNESS_URL}" ]; then
+ fail "MOZHARNESS_PATH or MOZHARNESS_URL must be defined";
+fi
+
+if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
+if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
+
+mkdir -p ~/artifacts/public
+
+cleanup() {
+ local rv=$?
+ if [[ -s /home/worker/.xsession-errors ]]; then
+ # To share X issues
+ cp /home/worker/.xsession-errors ~/artifacts/public/xsession-errors.log
+ fi
+ cleanup_xvfb
+ exit $rv
+}
+trap cleanup EXIT INT
+
+# Download mozharness with exponential backoff
+# curl already applies exponential backoff, but not for all
+# failed cases, apparently, as we keep getting failed downloads
+# with 404 code.
+download_mozharness() {
+ local max_attempts=10
+ local timeout=1
+ local attempt=0
+
+ echo "Downloading mozharness"
+
+ while [[ $attempt < $max_attempts ]]; do
+ if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+ rm -rf mozharness
+ if unzip -q mozharness.zip; then
+ return 0
+ fi
+ echo "error unzipping mozharness.zip" >&2
+ else
+ echo "failed to download mozharness zip" >&2
+ fi
+ echo "Download failed, retrying in $timeout seconds..." >&2
+ sleep $timeout
+ timeout=$((timeout*2))
+ attempt=$((attempt+1))
+ done
+
+ fail "Failed to download and unzip mozharness"
+}
+
+# Download mozharness if we're told to.
+if [ ${MOZHARNESS_URL} ]; then
+ download_mozharness
+ rm mozharness.zip
+
+ if ! [ -d mozharness ]; then
+ fail "mozharness zip did not contain mozharness/"
+ fi
+
+ MOZHARNESS_PATH=`pwd`/mozharness
+fi
+
+# pulseaudio daemon must be started before xvfb on Ubuntu 12.04.
+if [ "${UBUNTU_1204}" ]; then
+ maybe_start_pulse
+fi
+
+# run Xvfb in the background, if necessary
+if $NEED_XVFB; then
+ start_xvfb '1600x1200x24' 0
+fi
+
+if $START_VNC; then
+ x11vnc > ~/artifacts/public/x11vnc.log 2>&1 &
+fi
+
+if $NEED_WINDOW_MANAGER; then
+ # This is read by xsession to select the window manager
+ echo DESKTOP_SESSION=ubuntu > /home/worker/.xsessionrc
+
+ # note that doing anything with this display before running Xsession will cause sadness (like,
+ # crashes in compiz). Make sure that X has enough time to start
+ sleep 15
+ # DISPLAY has already been set above
+ # XXX: it would be ideal to add semaphore logic to make sure that the
+ # window manager is ready
+ /etc/X11/Xsession 2>&1 &
+
+ # Turn off the screen saver and screen locking
+ gsettings set org.gnome.desktop.screensaver idle-activation-enabled false
+ gsettings set org.gnome.desktop.screensaver lock-enabled false
+ gsettings set org.gnome.desktop.screensaver lock-delay 3600
+ # Disable the screen saver
+ xset s off s reset
+
+ if [ "${UBUNTU_1604}" ]; then
+ # start compiz for our window manager
+ compiz 2>&1 &
+ #TODO: how to determine if compiz starts correctly?
+ fi
+fi
+
+if [ "${UBUNTU_1604}" ]; then
+ maybe_start_pulse
+fi
+
+# For telemetry purposes, the build process wants information about the
+# source it is running; tc-vcs obscures this a little, but we can provide
+# it directly.
+export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
+export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
+
+# support multiple, space delimited, config files
+config_cmds=""
+for cfg in $MOZHARNESS_CONFIG; do
+ config_cmds="${config_cmds} --config-file ${MOZHARNESS_PATH}/configs/${cfg}"
+done
+
+mozharness_bin="/home/worker/bin/run-mozharness"
+
+# Save the computed mozharness command to a binary which is useful
+# for interactive mode.
+echo -e "#!/usr/bin/env bash
+# Some mozharness scripts assume base_work_dir is in
+# the current working directory, see bug 1279237
+cd $WORKSPACE
+cmd=\"python2.7 ${MOZHARNESS_PATH}/scripts/${MOZHARNESS_SCRIPT} ${config_cmds} ${@} \${@}\"
+echo \"Running: \${cmd}\"
+exec \${cmd}" > ${mozharness_bin}
+chmod +x ${mozharness_bin}
+
+# In interactive mode, the user will be prompted with options for what to do.
+if ! $TASKCLUSTER_INTERACTIVE; then
+ # run the given mozharness script and configs, but pass the rest of the
+ # arguments in from our own invocation
+ ${mozharness_bin};
+fi
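
For illustration, with assumed values MOZHARNESS_SCRIPT=desktop_unittest.py,
MOZHARNESS_CONFIG="unittests/linux_unittest.py", a downloaded mozharness and no
extra arguments, the generated /home/worker/bin/run-mozharness comes out roughly as:

#!/usr/bin/env bash
# Some mozharness scripts assume base_work_dir is in
# the current working directory, see bug 1279237
cd /home/worker/workspace
cmd="python2.7 /home/worker/workspace/mozharness/scripts/desktop_unittest.py  --config-file /home/worker/workspace/mozharness/configs/unittests/linux_unittest.py  ${@}"
echo "Running: ${cmd}"
exec ${cmd}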