Diffstat (limited to 'security/nss/automation/taskcluster')
-rw-r--r--  security/nss/automation/taskcluster/docker-clang-3.9/setup.sh | 4
-rw-r--r--  security/nss/automation/taskcluster/docker-decision/Dockerfile | 3
-rw-r--r--  security/nss/automation/taskcluster/docker-decision/bin/checkout.sh | 5
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh | 20
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/Dockerfile | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh | 20
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/license.txt | 15
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/setup-user.sh | 26
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/setup.sh | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/Dockerfile | 46
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip | bin 0 -> 13558285 bytes
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/bin/checkout.sh | 15
-rw-r--r--  security/nss/automation/taskcluster/docker/setup.sh | 22
-rw-r--r--  security/nss/automation/taskcluster/graph/src/context_hash.js | 16
-rw-r--r--  security/nss/automation/taskcluster/graph/src/extend.js | 355
-rw-r--r--  security/nss/automation/taskcluster/graph/src/image_builder.js | 13
-rw-r--r--  security/nss/automation/taskcluster/graph/src/queue.js | 5
-rw-r--r--  security/nss/automation/taskcluster/graph/src/try_syntax.js | 12
-rw-r--r--  security/nss/automation/taskcluster/image_builder/Dockerfile | 23
-rw-r--r--  security/nss/automation/taskcluster/image_builder/VERSION | 1
-rw-r--r--  security/nss/automation/taskcluster/image_builder/bin/checkout.sh | 15
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/build_gyp.sh | 9
-rw-r--r--  security/nss/automation/taskcluster/scripts/build_image.sh | 24
-rw-r--r--  security/nss/automation/taskcluster/scripts/check_abi.sh | 172
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/gen_certs.sh | 9
-rw-r--r--  security/nss/automation/taskcluster/scripts/run_hacl.sh | 40
-rw-r--r--  security/nss/automation/taskcluster/scripts/run_saw.sh | 9
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/run_scan_build.sh | 2
-rw-r--r--  security/nss/automation/taskcluster/scripts/split.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/releng.manifest | 8
-rw-r--r--  security/nss/automation/taskcluster/windows/setup.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/setup32.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/setup64.sh | 6
35 files changed, 958 insertions, 75 deletions
diff --git a/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh b/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
index 7b7d534e6..3076667a6 100644
--- a/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
+++ b/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
@@ -25,8 +25,8 @@ apt-get -y update
apt-get install -y --no-install-recommends ${apt_packages[@]}
# Download clang.
-curl -LO http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
-curl -LO http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
# Verify the signature.
gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
gpg --verify *.tar.xz.sig
diff --git a/security/nss/automation/taskcluster/docker-decision/Dockerfile b/security/nss/automation/taskcluster/docker-decision/Dockerfile
index 35777c0b7..473ce64ba 100644
--- a/security/nss/automation/taskcluster/docker-decision/Dockerfile
+++ b/security/nss/automation/taskcluster/docker-decision/Dockerfile
@@ -12,6 +12,9 @@ RUN chmod +x /home/worker/bin/*
ADD setup.sh /tmp/setup.sh
RUN bash /tmp/setup.sh
+# Change user.
+USER worker
+
# Env variables.
ENV HOME /home/worker
ENV SHELL /bin/bash
diff --git a/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh b/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
index 9167f6bda..0cdd2ac40 100644
--- a/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
+++ b/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
@@ -2,11 +2,6 @@
set -v -e -x
-if [ $(id -u) = 0 ]; then
- # Drop privileges by re-running this script.
- exec su worker $0
-fi
-
# Default values for testing.
REVISION=${NSS_HEAD_REVISION:-default}
REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile b/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile
new file mode 100644
index 000000000..3330c007f
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile
@@ -0,0 +1,30 @@
+FROM ubuntu:14.04
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+RUN useradd -d /home/worker -s /bin/bash -m worker
+WORKDIR /home/worker
+
+# Add build and test scripts.
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Install dependencies.
+ADD setup.sh /tmp/setup.sh
+RUN bash /tmp/setup.sh
+
+# Change user.
+USER worker
+
+# Env variables.
+ENV HOME /home/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+ENV HOSTNAME taskcluster-worker
+ENV LANG en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+ENV HOST localhost
+ENV DOMSUF localdomain
+
+# Set a default command for debugging.
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh b/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh
new file mode 100644
index 000000000..9167f6bda
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+if [ $(id -u) = 0 ]; then
+ # Drop privileges by re-running this script.
+ exec su worker $0
+fi
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh b/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh
new file mode 100644
index 000000000..f6325d966
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Update packages.
+export DEBIAN_FRONTEND=noninteractive
+apt-get -y update && apt-get -y upgrade
+
+apt_packages=()
+apt_packages+=('ca-certificates')
+apt_packages+=('g++-4.4')
+apt_packages+=('gcc-4.4')
+apt_packages+=('locales')
+apt_packages+=('make')
+apt_packages+=('mercurial')
+apt_packages+=('zlib1g-dev')
+
+# Install packages.
+apt-get -y update
+apt-get install -y --no-install-recommends ${apt_packages[@]}
+
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
+apt-get autoremove -y
+apt-get clean
+apt-get autoclean
+rm $0
diff --git a/security/nss/automation/taskcluster/docker-hacl/Dockerfile b/security/nss/automation/taskcluster/docker-hacl/Dockerfile
new file mode 100644
index 000000000..63f9a24e2
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/Dockerfile
@@ -0,0 +1,30 @@
+FROM ubuntu:xenial
+
+MAINTAINER Franziskus Kiefer <franziskuskiefer@gmail.com>
+# Based on the HACL* image from Benjamin Beurdouche and
+# the original F* formula with Daniel Fabian
+
+# Pinned versions of HACL* (F* and KreMLin are pinned as submodules)
+ENV haclrepo https://github.com/franziskuskiefer/hacl-star.git
+
+# Define versions of dependencies
+ENV opamv 4.04.2
+ENV haclversion 668d6cf274c33bbe2e951e3a84b73f2b6442a51f
+
+# Install required packages and set versions
+ADD setup.sh /tmp/setup.sh
+RUN bash /tmp/setup.sh
+
+# Create user, add scripts.
+RUN useradd -ms /bin/bash worker
+WORKDIR /home/worker
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+USER worker
+
+# Build F*, HACL*, verify. Install a few more dependencies.
+ENV OPAMYES true
+ENV PATH "/home/worker/hacl-star/dependencies/z3/bin:$PATH"
+ADD setup-user.sh /tmp/setup-user.sh
+ADD license.txt /tmp/license.txt
+RUN bash /tmp/setup-user.sh
diff --git a/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh b/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh
new file mode 100644
index 000000000..9167f6bda
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+if [ $(id -u) = 0 ]; then
+ # Drop privileges by re-running this script.
+ exec su worker $0
+fi
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker-hacl/license.txt b/security/nss/automation/taskcluster/docker-hacl/license.txt
new file mode 100644
index 000000000..03d25c4d3
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/license.txt
@@ -0,0 +1,15 @@
+/* Copyright 2016-2017 INRIA and Microsoft Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
diff --git a/security/nss/automation/taskcluster/docker-hacl/setup-user.sh b/security/nss/automation/taskcluster/docker-hacl/setup-user.sh
new file mode 100644
index 000000000..b8accaf58
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/setup-user.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Prepare build (OCaml packages)
+opam init
+echo ". /home/worker/.opam/opam-init/init.sh > /dev/null 2> /dev/null || true" >> .bashrc
+opam switch -v ${opamv}
+opam install ocamlfind batteries sqlite3 fileutils yojson ppx_deriving_yojson zarith pprint menhir ulex process fix wasm stdint
+
+# Get the HACL* code
+git clone ${haclrepo} hacl-star
+git -C hacl-star checkout ${haclversion}
+
+# Prepare submodules, and build, verify, test, and extract C code
+# This caches the extracted C code (pins the HACL* version). All we need to do
+# on CI now is compare the code in this docker image with the one in NSS.
+opam config exec -- make -C hacl-star prepare -j$(nproc)
+make -C hacl-star verify-nss -j$(nproc)
+make -C hacl-star -f Makefile.build snapshots/nss -j$(nproc)
+KOPTS="-funroll-loops 5" make -C hacl-star/code/curve25519 test -j$(nproc)
+make -C hacl-star/code/salsa-family test -j$(nproc)
+make -C hacl-star/code/poly1305 test -j$(nproc)
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
diff --git a/security/nss/automation/taskcluster/docker-hacl/setup.sh b/security/nss/automation/taskcluster/docker-hacl/setup.sh
new file mode 100644
index 000000000..f5f8bd7d5
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/setup.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Update packages.
+export DEBIAN_FRONTEND=noninteractive
+apt-get -qq update
+apt-get install --yes libssl-dev libsqlite3-dev g++-5 gcc-5 m4 make opam pkg-config python libgmp3-dev cmake curl libtool-bin autoconf wget locales
+update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 200
+update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 200
+
+# Get clang-format-3.9
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
+# Verify the signature.
+gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
+gpg --verify *.tar.xz.sig
+# Install into /usr/local/.
+tar xJvf *.tar.xz -C /usr/local --strip-components=1
+# Cleanup.
+rm *.tar.xz*
+
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
+apt-get autoremove -y
+apt-get clean
+apt-get autoclean
diff --git a/security/nss/automation/taskcluster/docker-saw/Dockerfile b/security/nss/automation/taskcluster/docker-saw/Dockerfile
new file mode 100644
index 000000000..a481ba048
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/Dockerfile
@@ -0,0 +1,46 @@
+FROM ubuntu:latest
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+RUN useradd -d /home/worker -s /bin/bash -m worker
+WORKDIR /home/worker
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y \
+ binutils \
+ build-essential \
+ bzip2 \
+ clang-3.8 \
+ curl \
+ gcc-multilib \
+ g++-multilib \
+ gyp \
+ lib32z1-dev \
+ mercurial \
+ ninja-build \
+ unzip \
+ zlib1g-dev
+
+# Add missing LLVM plugin for gold linker.
+ADD LLVMgold.so.zip /usr/lib/llvm-3.8/lib/LLVMgold.so.zip
+RUN unzip /usr/lib/llvm-3.8/lib/LLVMgold.so.zip -d /usr/lib/llvm-3.8/lib/
+
+# Install SAW/Cryptol.
+RUN curl -LO https://saw.galois.com/builds/nightly/saw-0.2-2018-01-14-Ubuntu14.04-64.tar.gz && \
+ tar xzvf saw-*.tar.gz -C /usr/local --strip-components=1 && \
+ rm saw-*.tar.gz
+
+# Install Z3.
+RUN curl -LO https://github.com/Z3Prover/z3/releases/download/z3-4.6.0/z3-4.6.0-x64-ubuntu-16.04.zip && \
+ unzip z3*.zip && \
+ cp -r z3*/* /usr/local/ && \
+ rm -fr z3*
+
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Change user.
+USER worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip b/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip
new file mode 100644
index 000000000..b5e5a593d
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip
Binary files differ
diff --git a/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh b/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh
new file mode 100644
index 000000000..0cdd2ac40
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker/setup.sh b/security/nss/automation/taskcluster/docker/setup.sh
index 3ba4e854e..7b90b2e69 100644
--- a/security/nss/automation/taskcluster/docker/setup.sh
+++ b/security/nss/automation/taskcluster/docker/setup.sh
@@ -12,6 +12,7 @@ apt-get install -y --no-install-recommends apt-utils
apt_packages=()
apt_packages+=('build-essential')
apt_packages+=('ca-certificates')
+apt_packages+=('clang-5.0')
apt_packages+=('curl')
apt_packages+=('npm')
apt_packages+=('git')
@@ -47,16 +48,17 @@ echo "deb http://ppa.launchpad.net/ubuntu-toolchain-r/test/ubuntu xenial main" >
apt-get -y update
apt-get install -y --no-install-recommends ${apt_packages[@]}
-# Download clang.
-curl -LO http://releases.llvm.org/4.0.0/clang+llvm-4.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz
-curl -LO http://releases.llvm.org/4.0.0/clang+llvm-4.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
-# Verify the signature.
-gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
-gpg --verify *.tar.xz.sig
-# Install into /usr/local/.
-tar xJvf *.tar.xz -C /usr/local --strip-components=1
-# Cleanup.
-rm *.tar.xz*
+# Latest version of abigail-tools
+apt-get install -y libxml2-dev autoconf libelf-dev libdw-dev libtool
+git clone git://sourceware.org/git/libabigail.git
+cd ./libabigail
+autoreconf -fi
+./configure --prefix=/usr --disable-static --disable-apidoc --disable-manual
+make
+make install
+cd ..
+apt-get remove -y libxml2-dev autoconf libtool
+rm -rf libabigail
# Install latest Rust (stable).
su worker -c "curl https://sh.rustup.rs -sSf | sh -s -- -y"
diff --git a/security/nss/automation/taskcluster/graph/src/context_hash.js b/security/nss/automation/taskcluster/graph/src/context_hash.js
index f0a2e9a88..0699a0590 100644
--- a/security/nss/automation/taskcluster/graph/src/context_hash.js
+++ b/security/nss/automation/taskcluster/graph/src/context_hash.js
@@ -27,14 +27,24 @@ function collectFilesInDirectory(dir) {
});
}
-// Compute a context hash for the given context path.
-export default function (context_path) {
+// A list of hashes for each file in the given path.
+function collectFileHashes(context_path) {
let root = path.join(__dirname, "../../../..");
let dir = path.join(root, context_path);
let files = collectFilesInDirectory(dir).sort();
- let hashes = files.map(file => {
+
+ return files.map(file => {
return sha256(file + "|" + fs.readFileSync(file, "utf-8"));
});
+}
+
+// Compute a context hash for the given context path.
+export default function (context_path) {
+ // Regenerate all images when the image_builder changes.
+ let hashes = collectFileHashes("automation/taskcluster/image_builder");
+
+ // Regenerate images when the image itself changes.
+ hashes = hashes.concat(collectFileHashes(context_path));
// Generate a new prefix every month to ensure the image stays buildable.
let now = new Date();
diff --git a/security/nss/automation/taskcluster/graph/src/extend.js b/security/nss/automation/taskcluster/graph/src/extend.js
index d541a1a3b..ee9ac9b74 100644
--- a/security/nss/automation/taskcluster/graph/src/extend.js
+++ b/security/nss/automation/taskcluster/graph/src/extend.js
@@ -15,15 +15,34 @@ const LINUX_CLANG39_IMAGE = {
path: "automation/taskcluster/docker-clang-3.9"
};
+const LINUX_GCC44_IMAGE = {
+ name: "linux-gcc-4.4",
+ path: "automation/taskcluster/docker-gcc-4.4"
+};
+
const FUZZ_IMAGE = {
name: "fuzz",
path: "automation/taskcluster/docker-fuzz"
};
+const HACL_GEN_IMAGE = {
+ name: "hacl",
+ path: "automation/taskcluster/docker-hacl"
+};
+
+const SAW_IMAGE = {
+ name: "saw",
+ path: "automation/taskcluster/docker-saw"
+};
+
const WINDOWS_CHECKOUT_CMD =
"bash -c \"hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " +
"(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " +
"(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)\"";
+const MAC_CHECKOUT_CMD = ["bash", "-c",
+ "hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " +
+ "(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " +
+ "(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)"];
/*****************************************************************************/
@@ -51,6 +70,16 @@ queue.filter(task => {
if (task.platform == "aarch64") {
return false;
}
+
+ // No mac
+ if (task.platform == "mac") {
+ return false;
+ }
+ }
+
+ if (task.tests == "fips" &&
+ (task.platform == "mac" || task.platform == "aarch64")) {
+ return false;
}
// Only old make builds have -Ddisable_libpkix=0 and can run chain tests.
@@ -59,13 +88,13 @@ queue.filter(task => {
}
if (task.group == "Test") {
- // Don't run test builds on old make platforms
- if (task.collection == "make") {
+ // Don't run test builds on old make platforms, and not for fips gyp.
+ if (task.collection == "make" || task.collection == "fips") {
return false;
}
}
- // Don't run additional hardware tests on ARM (we don't have anything there).
+ // Don't run all additional hardware tests on ARM.
if (task.group == "Cipher" && task.platform == "aarch64" && task.env &&
(task.env.NSS_DISABLE_PCLMUL == "1" || task.env.NSS_DISABLE_HW_AES == "1"
|| task.env.NSS_DISABLE_AVX == "1")) {
@@ -78,11 +107,19 @@ queue.filter(task => {
queue.map(task => {
if (task.collection == "asan") {
// CRMF and FIPS tests still leak, unfortunately.
- if (task.tests == "crmf" || task.tests == "fips") {
+ if (task.tests == "crmf") {
task.env.ASAN_OPTIONS = "detect_leaks=0";
}
}
+ // We don't run FIPS SSL tests
+ if (task.tests == "ssl") {
+ if (!task.env) {
+ task.env = {};
+ }
+ task.env.NSS_SSL_TESTS = "crl iopr policy";
+ }
+
// Windows is slow.
if (task.platform == "windows2012-64" && task.tests == "chains") {
task.maxRunTime = 7200;
@@ -128,6 +165,18 @@ export default async function main() {
],
});
+ await scheduleLinux("Linux 64 (opt, make)", {
+ env: {USE_64: "1", BUILD_OPT: "1"},
+ platform: "linux64",
+ image: LINUX_IMAGE,
+ collection: "make",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh"
+ ],
+ });
+
await scheduleLinux("Linux 32 (debug, make)", {
platform: "linux32",
image: LINUX_IMAGE,
@@ -144,8 +193,8 @@ export default async function main() {
UBSAN_OPTIONS: "print_stacktrace=1",
NSS_DISABLE_ARENA_FREE_LIST: "1",
NSS_DISABLE_UNLOAD: "1",
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
platform: "linux64",
collection: "asan",
@@ -153,6 +202,12 @@ export default async function main() {
features: ["allowPtrace"],
}, "--ubsan --asan");
+ await scheduleLinux("Linux 64 (FIPS opt)", {
+ platform: "linux64",
+ collection: "fips",
+ image: LINUX_IMAGE,
+ }, "--enable-fips --opt");
+
await scheduleWindows("Windows 2012 64 (debug, make)", {
platform: "windows2012-64",
collection: "make",
@@ -216,6 +271,82 @@ export default async function main() {
collection: "opt",
}, aarch64_base)
);
+
+ await scheduleLinux("Linux AArch64 (debug, make)",
+ merge({
+ env: {USE_64: "1"},
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh"
+ ],
+ collection: "make",
+ }, aarch64_base)
+ );
+
+ await scheduleMac("Mac (opt)", {collection: "opt"}, "--opt");
+ await scheduleMac("Mac (debug)", {collection: "debug"});
+}
+
+
+async function scheduleMac(name, base, args = "") {
+ let mac_base = merge(base, {
+ env: {
+ PATH: "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+ NSS_TASKCLUSTER_MAC: "1",
+ DOMSUF: "localdomain",
+ HOST: "localhost",
+ },
+ provisioner: "localprovisioner",
+ workerType: "nss-macos-10-12",
+ platform: "mac"
+ });
+
+ // Build base definition.
+ let build_base = merge({
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/build_gyp.sh", args]
+ ],
+ provisioner: "localprovisioner",
+ workerType: "nss-macos-10-12",
+ platform: "mac",
+ maxRunTime: 7200,
+ artifacts: [{
+ expires: 24 * 7,
+ type: "directory",
+ path: "public"
+ }],
+ kind: "build",
+ symbol: "B"
+ }, mac_base);
+
+ // The task that builds NSPR+NSS.
+ let task_build = queue.scheduleTask(merge(build_base, {name}));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/gen_certs.sh"]
+ ],
+ parent: task_build,
+ symbol: "Certs"
+ }));
+
+ // Schedule tests.
+ scheduleTests(task_build, task_cert, merge(mac_base, {
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/run_tests.sh"]
+ ]
+ }));
+
+ return queue.submit();
}
/*****************************************************************************/
@@ -242,6 +373,45 @@ async function scheduleLinux(name, base, args = "") {
// The task that builds NSPR+NSS.
let task_build = queue.scheduleTask(merge(build_base, {name}));
+ // Make builds run FIPS tests, which need an extra FIPS build.
+ if (base.collection == "make") {
+ let extra_build = queue.scheduleTask(merge(build_base, {
+ env: { NSS_FORCE_FIPS: "1" },
+ group: "FIPS",
+ name: `${name} w/ NSS_FORCE_FIPS`
+ }));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_certs.sh"
+ ],
+ parent: extra_build,
+ symbol: "Certs-F",
+ group: "FIPS",
+ }));
+
+ // Schedule FIPS tests.
+ queue.scheduleTask(merge(base, {
+ parent: task_cert,
+ name: "FIPS",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
+ ],
+ cycle: "standard",
+ kind: "test",
+ name: "FIPS tests",
+ symbol: "Tests-F",
+ tests: "fips",
+ group: "FIPS"
+ }));
+ }
+
// The task that generates certificates.
let task_cert = queue.scheduleTask(merge(build_base, {
name: "Certificates",
@@ -266,12 +436,32 @@ async function scheduleLinux(name, base, args = "") {
// Extra builds.
let extra_base = merge({group: "Builds"}, build_base);
queue.scheduleTask(merge(extra_base, {
- name: `${name} w/ clang-4.0`,
+ name: `${name} w/ clang-5.0`,
env: {
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
- symbol: "clang-4.0"
+ symbol: "clang-5.0"
+ }));
+
+ queue.scheduleTask(merge(extra_base, {
+ name: `${name} w/ gcc-4.4`,
+ image: LINUX_GCC44_IMAGE,
+ env: {
+ USE_64: "1",
+ CC: "gcc-4.4",
+ CCC: "g++-4.4",
+ // gcc-4.6 introduced nullptr.
+ NSS_DISABLE_GTESTS: "1",
+ },
+ // Use the old Makefile-based build system, GYP doesn't have a proper GCC
+ // version check for __int128 support. It's mainly meant to cover RHEL6.
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh",
+ ],
+ symbol: "gcc-4.4"
}));
queue.scheduleTask(merge(extra_base, {
@@ -403,12 +593,13 @@ async function scheduleFuzzing() {
// Schedule MPI fuzzing runs.
let mpi_base = merge(run_base, {group: "MPI"});
- let mpi_names = ["add", "addmod", "div", "expmod", "mod", "mulmod", "sqr",
+ let mpi_names = ["add", "addmod", "div", "mod", "mulmod", "sqr",
"sqrmod", "sub", "submod"];
for (let name of mpi_names) {
scheduleFuzzingRun(mpi_base, `MPI (${name})`, `mpi-${name}`, 4096, name);
}
scheduleFuzzingRun(mpi_base, `MPI (invmod)`, `mpi-invmod`, 256, "invmod");
+ scheduleFuzzingRun(mpi_base, `MPI (expmod)`, `mpi-expmod`, 2048, "expmod");
// Schedule TLS fuzzing runs (non-fuzzing mode).
let tls_base = merge(run_base, {group: "TLS"});
@@ -625,6 +816,43 @@ async function scheduleWindows(name, base, build_script) {
symbol: "B"
});
+ // Make builds run FIPS tests, which need an extra FIPS build.
+ if (base.collection == "make") {
+ let extra_build = queue.scheduleTask(merge(build_base, {
+ env: { NSS_FORCE_FIPS: "1" },
+ group: "FIPS",
+ name: `${name} w/ NSS_FORCE_FIPS`
+ }));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ WINDOWS_CHECKOUT_CMD,
+ "bash -c nss/automation/taskcluster/windows/gen_certs.sh"
+ ],
+ parent: extra_build,
+ symbol: "Certs-F",
+ group: "FIPS",
+ }));
+
+ // Schedule FIPS tests.
+ queue.scheduleTask(merge(base, {
+ parent: task_cert,
+ name: "FIPS",
+ command: [
+ WINDOWS_CHECKOUT_CMD,
+ "bash -c nss/automation/taskcluster/windows/run_tests.sh"
+ ],
+ cycle: "standard",
+ kind: "test",
+ name: "FIPS tests",
+ symbol: "Tests-F",
+ tests: "fips",
+ group: "FIPS"
+ }));
+ }
+
// The task that builds NSPR+NSS.
let task_build = queue.scheduleTask(merge(build_base, {name}));
@@ -685,6 +913,13 @@ function scheduleTests(task_build, task_cert, test_base) {
env: {NSS_DISABLE_AVX: "1"}, group: "Cipher"
}));
queue.scheduleTask(merge(no_cert_base, {
+ name: "Cipher tests", symbol: "NoSSSE3|NEON", tests: "cipher",
+ env: {
+ NSS_DISABLE_ARM_NEON: "1",
+ NSS_DISABLE_SSSE3: "1"
+ }, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
name: "EC tests", symbol: "EC", tests: "ec"
}));
queue.scheduleTask(merge(no_cert_base, {
@@ -703,9 +938,6 @@ function scheduleTests(task_build, task_cert, test_base) {
name: "DB tests", symbol: "DB", tests: "dbtests"
}));
queue.scheduleTask(merge(cert_base, {
- name: "FIPS tests", symbol: "FIPS", tests: "fips"
- }));
- queue.scheduleTask(merge(cert_base, {
name: "Merge tests", symbol: "Merge", tests: "merge"
}));
queue.scheduleTask(merge(cert_base, {
@@ -739,6 +971,18 @@ async function scheduleTools() {
kind: "test"
};
+ //ABI check task
+ queue.scheduleTask(merge(base, {
+ symbol: "abi",
+ name: "abi",
+ image: LINUX_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/check_abi.sh"
+ ],
+ }));
+
queue.scheduleTask(merge(base, {
symbol: "clang-format-3.9",
name: "clang-format-3.9",
@@ -751,13 +995,13 @@ async function scheduleTools() {
}));
queue.scheduleTask(merge(base, {
- symbol: "scan-build-4.0",
- name: "scan-build-4.0",
+ symbol: "scan-build-5.0",
+ name: "scan-build-5.0",
image: LINUX_IMAGE,
env: {
USE_64: "1",
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
artifacts: {
public: {
@@ -773,5 +1017,80 @@ async function scheduleTools() {
]
}));
+ queue.scheduleTask(merge(base, {
+ symbol: "hacl",
+ name: "hacl",
+ image: HACL_GEN_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_hacl.sh"
+ ]
+ }));
+
+ let task_saw = queue.scheduleTask(merge(base, {
+ symbol: "B",
+ group: "SAW",
+ name: "LLVM bitcode build (32 bit)",
+ image: SAW_IMAGE,
+ kind: "build",
+ env: {
+ AR: "llvm-ar-3.8",
+ CC: "clang-3.8",
+ CCC: "clang++-3.8"
+ },
+ artifacts: {
+ public: {
+ expires: 24 * 7,
+ type: "directory",
+ path: "/home/worker/artifacts"
+ }
+ },
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh --disable-tests --emit-llvm -m32"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "bmul",
+ group: "SAW",
+ name: "bmul.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh bmul"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "ChaCha20",
+ group: "SAW",
+ name: "chacha20.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh chacha20"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "Poly1305",
+ group: "SAW",
+ name: "poly1305.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh poly1305"
+ ]
+ }));
+
return queue.submit();
}
diff --git a/security/nss/automation/taskcluster/graph/src/image_builder.js b/security/nss/automation/taskcluster/graph/src/image_builder.js
index bc90e0242..d9d7755dc 100644
--- a/security/nss/automation/taskcluster/graph/src/image_builder.js
+++ b/security/nss/automation/taskcluster/graph/src/image_builder.js
@@ -30,14 +30,12 @@ export async function buildTask({name, path}) {
let ns = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;
return {
- name: "Image Builder",
- image: "taskcluster/image_builder:0.1.5",
+ name: `Image Builder (${name})`,
+ image: "nssdev/image_builder:0.1.5",
routes: ["index." + ns],
env: {
- HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
- BASE_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
- HEAD_REV: process.env.NSS_HEAD_REVISION,
- HEAD_REF: process.env.NSS_HEAD_REVISION,
+ NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
+ NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION,
PROJECT: process.env.TC_PROJECT,
CONTEXT_PATH: path,
HASH: hash
@@ -52,10 +50,11 @@ export async function buildTask({name, path}) {
command: [
"/bin/bash",
"-c",
- "/home/worker/bin/build_image.sh"
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build_image.sh"
],
platform: "nss-decision",
features: ["dind"],
+ maxRunTime: 7200,
kind: "build",
symbol: "I"
};
diff --git a/security/nss/automation/taskcluster/graph/src/queue.js b/security/nss/automation/taskcluster/graph/src/queue.js
index 29b570729..809a17bf1 100644
--- a/security/nss/automation/taskcluster/graph/src/queue.js
+++ b/security/nss/automation/taskcluster/graph/src/queue.js
@@ -31,10 +31,11 @@ function parseRoutes(routes) {
];
// Notify about failures (except on try).
- if (process.env.TC_PROJECT != "nss-try") {
+ // Turned off, too noisy.
+ /*if (process.env.TC_PROJECT != "nss-try") {
rv.push(`notify.email.${process.env.TC_OWNER}.on-failed`,
`notify.email.${process.env.TC_OWNER}.on-exception`);
- }
+ }*/
return rv;
}
diff --git a/security/nss/automation/taskcluster/graph/src/try_syntax.js b/security/nss/automation/taskcluster/graph/src/try_syntax.js
index 7748e068a..1c06dde13 100644
--- a/security/nss/automation/taskcluster/graph/src/try_syntax.js
+++ b/security/nss/automation/taskcluster/graph/src/try_syntax.js
@@ -22,10 +22,10 @@ function parseOptions(opts) {
}
// Parse platforms.
- let allPlatforms = ["linux", "linux64", "linux64-asan",
+ let allPlatforms = ["linux", "linux64", "linux64-asan", "linux64-fips",
"win", "win64", "win-make", "win64-make",
"linux64-make", "linux-make", "linux-fuzz",
- "linux64-fuzz", "aarch64"];
+ "linux64-fuzz", "aarch64", "mac"];
let platforms = intersect(opts.platform.split(/\s*,\s*/), allPlatforms);
// If the given value is nonsense or "none" default to all platforms.
@@ -51,7 +51,7 @@ function parseOptions(opts) {
}
// Parse tools.
- let allTools = ["clang-format", "scan-build"];
+ let allTools = ["clang-format", "scan-build", "hacl", "saw", "abi"];
let tools = intersect(opts.tools.split(/\s*,\s*/), allTools);
// If the given value is "all" run all tools.
@@ -77,7 +77,8 @@ function filter(opts) {
// are not affected by platform or build type selectors.
if (task.platform == "nss-tools") {
return opts.tools.some(tool => {
- return task.symbol.toLowerCase().startsWith(tool);
+ return task.symbol.toLowerCase().startsWith(tool) ||
+ (task.group && task.group.toLowerCase().startsWith(tool));
});
}
@@ -111,6 +112,7 @@ function filter(opts) {
"linux": "linux32",
"linux-fuzz": "linux32",
"linux64-asan": "linux64",
+ "linux64-fips": "linux64",
"linux64-fuzz": "linux64",
"linux64-make": "linux64",
"linux-make": "linux32",
@@ -126,6 +128,8 @@ function filter(opts) {
// Additional checks.
if (platform == "linux64-asan") {
keep &= coll("asan");
+ } else if (platform == "linux64-fips") {
+ keep &= coll("fips");
} else if (platform == "linux64-make" || platform == "linux-make" ||
platform == "win64-make" || platform == "win-make") {
keep &= coll("make");
diff --git a/security/nss/automation/taskcluster/image_builder/Dockerfile b/security/nss/automation/taskcluster/image_builder/Dockerfile
new file mode 100644
index 000000000..f8b4edcc5
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/Dockerfile
@@ -0,0 +1,23 @@
+FROM ubuntu:16.04
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+WORKDIR /home/worker
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y apt-transport-https apt-utils
+RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9 && \
+ sh -c "echo deb https://get.docker.io/ubuntu docker main \
+ > /etc/apt/sources.list.d/docker.list"
+RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 41BD8711B1F0EC2B0D85B91CF59CE3A8323293EE && \
+ sh -c "echo deb http://ppa.launchpad.net/mercurial-ppa/releases/ubuntu xenial main \
+ > /etc/apt/sources.list.d/mercurial.list"
+RUN apt-get update && apt-get install -y \
+ lxc-docker-1.6.1 \
+ mercurial
+
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/image_builder/VERSION b/security/nss/automation/taskcluster/image_builder/VERSION
new file mode 100644
index 000000000..9faa1b7a7
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/VERSION
@@ -0,0 +1 @@
+0.1.5
diff --git a/security/nss/automation/taskcluster/image_builder/bin/checkout.sh b/security/nss/automation/taskcluster/image_builder/bin/checkout.sh
new file mode 100644
index 000000000..0cdd2ac40
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/bin/checkout.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/scripts/build_gyp.sh b/security/nss/automation/taskcluster/scripts/build_gyp.sh
index 7190bd5c4..fb3a33a52 100755
--- a/security/nss/automation/taskcluster/scripts/build_gyp.sh
+++ b/security/nss/automation/taskcluster/scripts/build_gyp.sh
@@ -9,5 +9,10 @@ hg_clone https://hg.mozilla.org/projects/nspr ./nspr default
nss/build.sh -g -v "$@"
# Package.
-mkdir artifacts
-tar cvfjh artifacts/dist.tar.bz2 dist
+if [[ $(uname) = "Darwin" ]]; then
+ mkdir -p public
+ tar cvfjh public/dist.tar.bz2 dist
+else
+ mkdir artifacts
+ tar cvfjh artifacts/dist.tar.bz2 dist
+fi
diff --git a/security/nss/automation/taskcluster/scripts/build_image.sh b/security/nss/automation/taskcluster/scripts/build_image.sh
new file mode 100644
index 000000000..b422214e7
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/build_image.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -vex
+
+set -x -e -v
+
+# Prefix errors with taskcluster error prefix so that they are parsed by Treeherder
+raise_error() {
+ echo
+ echo "[taskcluster-image-build:error] $1"
+ exit 1
+}
+
+# Ensure that the PROJECT is specified so the image can be indexed
+test -n "$PROJECT" || raise_error "Project must be provided."
+test -n "$HASH" || raise_error "Context Hash must be provided."
+
+CONTEXT_PATH=/home/worker/nss/$CONTEXT_PATH
+
+test -d $CONTEXT_PATH || raise_error "Context Path $CONTEXT_PATH does not exist."
+test -f "$CONTEXT_PATH/Dockerfile" || raise_error "Dockerfile must be present in $CONTEXT_PATH."
+
+docker build -t $PROJECT:$HASH $CONTEXT_PATH
+
+mkdir /artifacts
+docker save $PROJECT:$HASH > /artifacts/image.tar
diff --git a/security/nss/automation/taskcluster/scripts/check_abi.sh b/security/nss/automation/taskcluster/scripts/check_abi.sh
new file mode 100644
index 000000000..dbc1a476f
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/check_abi.sh
@@ -0,0 +1,172 @@
+#! /bin/bash
+
+set_env()
+{
+ cd /home/worker
+ HGDIR=/home/worker
+ OUTPUTDIR=$(pwd)$(echo "/output")
+ DATE=$(date "+TB [%Y-%m-%d %H:%M:%S]")
+
+ if [ ! -d "${OUTPUTDIR}" ]; then
+ echo "Creating output dir"
+ mkdir "${OUTPUTDIR}"
+ fi
+
+ if [ ! -d "nspr" ]; then
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -r "default" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/nspr" && break
+ rm -rf nspr
+ done
+ fi
+
+ cd nss
+ ./build.sh -v -c
+ cd ..
+}
+
+check_abi()
+{
+ set_env
+ set +e #reverses set -e from build.sh to allow possible hg clone failures
+ if [[ "$1" != --nobuild ]]; then # Start nobuild block
+
+ echo "######## NSS ABI CHECK ########"
+ echo "######## creating temporary HG clones ########"
+
+ rm -rf ${HGDIR}/baseline
+ mkdir ${HGDIR}/baseline
+ BASE_NSS=`cat ${HGDIR}/nss/automation/abi-check/previous-nss-release` #Reads the version number of the last release from the respective file
+ NSS_CLONE_RESULT=0
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -u "${BASE_NSS}" "https://hg.mozilla.org/projects/nss" "${HGDIR}/baseline/nss"
+ if [ $? -eq 0 ]; then
+ NSS_CLONE_RESULT=0
+ break
+ fi
+ rm -rf "${HGDIR}/baseline/nss"
+ NSS_CLONE_RESULT=1
+ done
+ if [ ${NSS_CLONE_RESULT} -ne 0 ]; then
+ echo "invalid tag in automation/abi-check/previous-nss-release"
+ return 1
+ fi
+
+ BASE_NSPR=NSPR_$(head -1 ${HGDIR}/baseline/nss/automation/release/nspr-version.txt | cut -d . -f 1-2 | tr . _)_BRANCH
+ hg clone -u "${BASE_NSPR}" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/baseline/nspr"
+ NSPR_CLONE_RESULT=$?
+
+ if [ ${NSPR_CLONE_RESULT} -ne 0 ]; then
+ rm -rf "${HGDIR}/baseline/nspr"
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -u "default" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/baseline/nspr" && break
+ rm -rf "${HGDIR}/baseline/nspr"
+ done
+ echo "Nonexisting tag ${BASE_NSPR} derived from ${BASE_NSS} automation/release/nspr-version.txt"
+ echo "Using default branch instead."
+ fi
+
+ echo "######## building baseline NSPR/NSS ########"
+ echo "${HGDIR}/baseline/nss/build.sh"
+ cd ${HGDIR}/baseline/nss
+ ./build.sh -v -c
+ cd ${HGDIR}
+ else # Else nobuild block
+ echo "######## using existing baseline NSPR/NSS build ########"
+ fi # End nobuild block
+
+ set +e #reverses set -e from build.sh to allow abidiff failures
+
+ echo "######## Starting abidiff procedure ########"
+ abi_diff
+}
+
+#Slightly modified from buildbot-slave/build.sh
+abi_diff()
+{
+ ABI_PROBLEM_FOUND=0
+ ABI_REPORT=${OUTPUTDIR}/abi-diff.txt
+ rm -f ${ABI_REPORT}
+ PREVDIST=${HGDIR}/baseline/dist
+ NEWDIST=${HGDIR}/dist
+ ALL_SOs="libfreebl3.so libfreeblpriv3.so libnspr4.so libnss3.so libnssckbi.so libnssdbm3.so libnsssysinit.so libnssutil3.so libplc4.so libplds4.so libsmime3.so libsoftokn3.so libssl3.so"
+ for SO in ${ALL_SOs}; do
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ touch ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt
+ fi
+ abidiff --hd1 $PREVDIST/public/ --hd2 $NEWDIST/public \
+ $PREVDIST/*/lib/$SO $NEWDIST/*/lib/$SO \
+ > ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+ RET=$?
+ cat ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt \
+ | grep -v "^Functions changes summary:" \
+ | grep -v "^Variables changes summary:" \
+ > ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt
+ rm -f ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+
+ ABIDIFF_ERROR=$((($RET & 0x01) != 0))
+ ABIDIFF_USAGE_ERROR=$((($RET & 0x02) != 0))
+ ABIDIFF_ABI_CHANGE=$((($RET & 0x04) != 0))
+ ABIDIFF_ABI_INCOMPATIBLE_CHANGE=$((($RET & 0x08) != 0))
+ ABIDIFF_UNKNOWN_BIT_SET=$((($RET & 0xf0) != 0))
+
+ # If abidiff reports an error, or a usage error, or if it sets a result
+ # bit value this script doesn't know yet about, we'll report failure.
+ # For ABI changes, we don't yet report an error. We'll compare the
+ # result report with our whitelist. This allows us to silence changes
+ # that we're already aware of and have been declared acceptable.
+
+ REPORT_RET_AS_FAILURE=0
+ if [ $ABIDIFF_ERROR -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_USAGE_ERROR -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_USAGE_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_UNKNOWN_BIT_SET -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_UNKNOWN_BIT_SET."
+ REPORT_RET_AS_FAILURE=1
+ fi
+
+ if [ $ABIDIFF_ABI_CHANGE -ne 0 ]; then
+ echo "Ignoring abidiff result ABI_CHANGE, instead we'll check for non-whitelisted differences."
+ fi
+ if [ $ABIDIFF_ABI_INCOMPATIBLE_CHANGE -ne 0 ]; then
+ echo "Ignoring abidiff result ABIDIFF_ABI_INCOMPATIBLE_CHANGE, instead we'll check for non-whitelisted differences."
+ fi
+
+ if [ $REPORT_RET_AS_FAILURE -ne 0 ]; then
+ ABI_PROBLEM_FOUND=1
+ echo "abidiff {$PREVDIST , $NEWDIST} for $SO FAILED with result $RET, or failed writing to ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ ABI_PROBLEM_FOUND=1
+ echo "FAILED to access report file: ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt"
+ fi
+
+ diff -wB -u ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt \
+ ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt >> ${ABI_REPORT}
+ if [ ! -f ${ABI_REPORT} ]; then
+ ABI_PROBLEM_FOUND=1
+            echo "FAILED to compare expected and new report: ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
+ done
+
+ if [ -s ${ABI_REPORT} ]; then
+ echo "FAILED: there are new unexpected ABI changes"
+ cat ${ABI_REPORT}
+ return 1
+ elif [ $ABI_PROBLEM_FOUND -ne 0 ]; then
+ echo "FAILED: failure executing the ABI checks"
+ cat ${ABI_REPORT}
+ return 1
+ fi
+
+ return 0
+}
+
+check_abi $1
diff --git a/security/nss/automation/taskcluster/scripts/gen_certs.sh b/security/nss/automation/taskcluster/scripts/gen_certs.sh
index b8d4f60ba..c03db7e9c 100755
--- a/security/nss/automation/taskcluster/scripts/gen_certs.sh
+++ b/security/nss/automation/taskcluster/scripts/gen_certs.sh
@@ -12,5 +12,10 @@ NSS_TESTS=cert NSS_CYCLES="standard pkix sharedb" $(dirname $0)/run_tests.sh
echo 1 > tests_results/security/localhost
# Package.
-mkdir artifacts
-tar cvfjh artifacts/dist.tar.bz2 dist tests_results
+if [[ $(uname) = "Darwin" ]]; then
+ mkdir -p public
+ tar cvfjh public/dist.tar.bz2 dist tests_results
+else
+ mkdir artifacts
+ tar cvfjh artifacts/dist.tar.bz2 dist tests_results
+fi
diff --git a/security/nss/automation/taskcluster/scripts/run_hacl.sh b/security/nss/automation/taskcluster/scripts/run_hacl.sh
new file mode 100644
index 000000000..281075eef
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/run_hacl.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+if [[ $(id -u) -eq 0 ]]; then
+ # Drop privileges by re-running this script.
+ # Note: this mangles arguments, better to avoid running scripts as root.
+ exec su worker -c "$0 $*"
+fi
+
+set -e -x -v
+
+# The docker image this is running in has the HACL* and NSS sources.
+# The extracted C code from HACL* is already generated and the HACL* tests were
+# successfully executed.
+
+# Verify Poly1305 (doesn't work in docker image build)
+make verify -C ~/hacl-star/code/poly1305 -j$(nproc)
+
+# Add license header to specs
+spec_files=($(find ~/hacl-star/specs -type f -name '*.fst'))
+for f in "${spec_files[@]}"; do
+ cat /tmp/license.txt "$f" > /tmp/tmpfile && mv /tmp/tmpfile "$f"
+done
+
+# Format the extracted C code.
+cd ~/hacl-star/snapshots/nss
+cp ~/nss/.clang-format .
+find . -type f -name '*.[ch]' -exec clang-format -i {} \+
+
+# These diff commands will return 1 if there are differences and stop the script.
+files=($(find ~/nss/lib/freebl/verified/ -type f -name '*.[ch]'))
+for f in "${files[@]}"; do
+ diff $f $(basename "$f")
+done
+
+# Check that the specs didn't change either.
+cd ~/hacl-star/specs
+files=($(find ~/nss/lib/freebl/verified/specs -type f))
+for f in "${files[@]}"; do
+ diff $f $(basename "$f")
+done
diff --git a/security/nss/automation/taskcluster/scripts/run_saw.sh b/security/nss/automation/taskcluster/scripts/run_saw.sh
new file mode 100644
index 000000000..0e9a8224a
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/run_saw.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+source $(dirname "$0")/tools.sh
+
+# Fetch artifact if needed.
+fetch_dist
+
+# Run SAW.
+saw "nss/automation/saw/$1.saw"
diff --git a/security/nss/automation/taskcluster/scripts/run_scan_build.sh b/security/nss/automation/taskcluster/scripts/run_scan_build.sh
index 4024c226e..014530b42 100755
--- a/security/nss/automation/taskcluster/scripts/run_scan_build.sh
+++ b/security/nss/automation/taskcluster/scripts/run_scan_build.sh
@@ -34,7 +34,7 @@ for i in "${!scan[@]}"; do
done
# run scan-build (only building affected directories)
-scan-build -o /home/worker/artifacts --use-cc=$CC --use-c++=$CCC make nss_build_all && cd ..
+scan-build-5.0 -o /home/worker/artifacts --use-cc=$CC --use-c++=$CCC make nss_build_all && cd ..
# print errors we found
set +v +x
diff --git a/security/nss/automation/taskcluster/scripts/split.sh b/security/nss/automation/taskcluster/scripts/split.sh
index 4d18385ec..fded64e1b 100644
--- a/security/nss/automation/taskcluster/scripts/split.sh
+++ b/security/nss/automation/taskcluster/scripts/split.sh
@@ -23,16 +23,10 @@ split_util() {
# Copy everything.
cp -R $nssdir $dstdir
- # Skip gtests when building.
- sed '/^DIRS = /s/ cpputil gtests$//' $nssdir/manifest.mn > $dstdir/manifest.mn-t && mv $dstdir/manifest.mn-t $dstdir/manifest.mn
-
# Remove subdirectories that we don't want.
rm -rf $dstdir/cmd
- rm -rf $dstdir/tests
rm -rf $dstdir/lib
rm -rf $dstdir/automation
- rm -rf $dstdir/gtests
- rm -rf $dstdir/cpputil
rm -rf $dstdir/doc
# Start with an empty cmd lib directories to be filled selectively.
diff --git a/security/nss/automation/taskcluster/windows/releng.manifest b/security/nss/automation/taskcluster/windows/releng.manifest
index 68d2c1d9e..d571c544d 100644
--- a/security/nss/automation/taskcluster/windows/releng.manifest
+++ b/security/nss/automation/taskcluster/windows/releng.manifest
@@ -1,10 +1,10 @@
[
{
- "version": "Visual Studio 2015 Update 3 14.0.25425.01 / SDK 10.0.14393.0",
- "size": 326656969,
- "digest": "babc414ffc0457d27f5a1ed24a8e4873afbe2f1c1a4075469a27c005e1babc3b2a788f643f825efedff95b79686664c67ec4340ed535487168a3482e68559bc7",
+ "version": "Visual Studio 2017 15.4.2 / SDK 10.0.15063.0",
+ "size": 303146863,
+ "digest": "18700889e6b5e81613b9cf57ce4e0d46a6ee45bb4c5c33bae2604a5275326128775b8a032a1eb178c5db973746d565340c4e36d98375789e1d5bd836ab16ba58",
"algorithm": "sha512",
- "filename": "vs2015u3.zip",
+ "filename": "vs2017_15.4.2.zip",
"unpack": true
},
{
diff --git a/security/nss/automation/taskcluster/windows/setup.sh b/security/nss/automation/taskcluster/windows/setup.sh
index 7def50db4..36a040ba1 100644
--- a/security/nss/automation/taskcluster/windows/setup.sh
+++ b/security/nss/automation/taskcluster/windows/setup.sh
@@ -2,12 +2,12 @@
set -v -e -x
-export VSPATH="$(pwd)/vs2015u3"
+export VSPATH="$(pwd)/vs2017_15.4.2"
export NINJA_PATH="$(pwd)/ninja/bin"
export WINDOWSSDKDIR="${VSPATH}/SDK"
export VS90COMNTOOLS="${VSPATH}/VC"
-export INCLUDE="${VSPATH}/VC/include:${VSPATH}/SDK/Include/10.0.14393.0/ucrt:${VSPATH}/SDK/Include/10.0.14393.0/shared:${VSPATH}/SDK/Include/10.0.14393.0/um"
+export INCLUDE="${VSPATH}/VC/include:${VSPATH}/SDK/Include/10.0.15063.0/ucrt:${VSPATH}/SDK/Include/10.0.15063.0/shared:${VSPATH}/SDK/Include/10.0.15063.0/um"
# Usage: hg_clone repo dir [revision=@]
hg_clone() {
@@ -23,4 +23,4 @@ hg_clone() {
}
hg_clone https://hg.mozilla.org/build/tools tools default
-tools/scripts/tooltool/tooltool_wrapper.sh $(dirname $0)/releng.manifest https://api.pub.build.mozilla.org/tooltool/ non-existant-file.sh /c/mozilla-build/python/python.exe /c/builds/tooltool.py --authentication-file /c/builds/relengapi.tok -c /c/builds/tooltool_cache
+tools/scripts/tooltool/tooltool_wrapper.sh $(dirname $0)/releng.manifest https://tooltool.mozilla-releng.net/ non-existant-file.sh /c/mozilla-build/python/python.exe /c/builds/tooltool.py --authentication-file /c/builds/relengapi.tok -c /c/builds/tooltool_cache
diff --git a/security/nss/automation/taskcluster/windows/setup32.sh b/security/nss/automation/taskcluster/windows/setup32.sh
index bcddabfa3..19bed284d 100644
--- a/security/nss/automation/taskcluster/windows/setup32.sh
+++ b/security/nss/automation/taskcluster/windows/setup32.sh
@@ -4,7 +4,7 @@ set -v -e -x
source $(dirname $0)/setup.sh
-export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x86/Microsoft.VC140.CRT"
+export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x86/Microsoft.VC141.CRT"
export WIN_UCRT_REDIST_DIR="${VSPATH}/SDK/Redist/ucrt/DLLs/x86"
-export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/amd64_x86:${VSPATH}/VC/bin/amd64:${VSPATH}/VC/bin:${VSPATH}/SDK/bin/x86:${VSPATH}/SDK/bin/x64:${VSPATH}/VC/redist/x86/Microsoft.VC140.CRT:${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x86:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
-export LIB="${VSPATH}/VC/lib:${VSPATH}/SDK/lib/10.0.14393.0/ucrt/x86:${VSPATH}/SDK/lib/10.0.14393.0/um/x86"
+export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/Hostx64/x86:${VSPATH}/VC/bin/Hostx64/x64:${VSPATH}/VC/Hostx86/x86:${VSPATH}/SDK/bin/10.0.15063.0/x64:${VSPATH}/VC/redist/x86/Microsoft.VC141.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x86:${PATH}"
+export LIB="${VSPATH}/VC/lib/x86:${VSPATH}/SDK/lib/10.0.15063.0/ucrt/x86:${VSPATH}/SDK/lib/10.0.15063.0/um/x86"
diff --git a/security/nss/automation/taskcluster/windows/setup64.sh b/security/nss/automation/taskcluster/windows/setup64.sh
index f308298c1..d16cb0ec9 100644
--- a/security/nss/automation/taskcluster/windows/setup64.sh
+++ b/security/nss/automation/taskcluster/windows/setup64.sh
@@ -4,7 +4,7 @@ set -v -e -x
source $(dirname $0)/setup.sh
-export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT"
+export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x64/Microsoft.VC141.CRT"
export WIN_UCRT_REDIST_DIR="${VSPATH}/SDK/Redist/ucrt/DLLs/x64"
-export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/amd64:${VSPATH}/VC/bin:${VSPATH}/SDK/bin/x64:${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
-export LIB="${VSPATH}/VC/lib/amd64:${VSPATH}/SDK/lib/10.0.14393.0/ucrt/x64:${VSPATH}/SDK/lib/10.0.14393.0/um/x64"
+export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/Hostx64/x64:${VSPATH}/VC/bin/Hostx86/x86:${VSPATH}/SDK/bin/10.0.15063.0/x64:${VSPATH}/VC/redist/x64/Microsoft.VC141.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
+export LIB="${VSPATH}/VC/lib/x64:${VSPATH}/SDK/lib/10.0.15063.0/ucrt/x64:${VSPATH}/SDK/lib/10.0.15063.0/um/x64"