Diffstat (limited to 'security/nss/automation')
-rw-r--r--  security/nss/automation/abi-check/expected-report-libssl3.so.txt | 28
-rw-r--r--  security/nss/automation/abi-check/previous-nss-release | 2
-rwxr-xr-x  security/nss/automation/buildbot-slave/build.sh | 78
-rw-r--r--  security/nss/automation/clang-format/run_clang_format.sh | 21
-rw-r--r--  security/nss/automation/clang-format/setup.sh | 4
-rw-r--r--  security/nss/automation/release/nspr-version.txt | 2
-rw-r--r--  security/nss/automation/saw/bmul.cry | 8
-rw-r--r--  security/nss/automation/saw/bmul.saw | 26
-rw-r--r--  security/nss/automation/saw/chacha20.cry | 357
-rw-r--r--  security/nss/automation/saw/chacha20.saw | 40
-rw-r--r--  security/nss/automation/saw/poly1305-hacl.saw | 38
-rw-r--r--  security/nss/automation/saw/poly1305.cry | 336
-rw-r--r--  security/nss/automation/saw/poly1305.saw | 47
-rw-r--r--  security/nss/automation/taskcluster/docker-clang-3.9/setup.sh | 4
-rw-r--r--  security/nss/automation/taskcluster/docker-decision/Dockerfile | 3
-rw-r--r--  security/nss/automation/taskcluster/docker-decision/bin/checkout.sh | 5
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh | 20
-rw-r--r--  security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/Dockerfile | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh | 20
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/license.txt | 15
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/setup-user.sh | 26
-rw-r--r--  security/nss/automation/taskcluster/docker-hacl/setup.sh | 30
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/Dockerfile | 46
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip | bin 0 -> 13558285 bytes
-rw-r--r--  security/nss/automation/taskcluster/docker-saw/bin/checkout.sh | 15
-rw-r--r--  security/nss/automation/taskcluster/docker/setup.sh | 22
-rw-r--r--  security/nss/automation/taskcluster/graph/src/context_hash.js | 16
-rw-r--r--  security/nss/automation/taskcluster/graph/src/extend.js | 355
-rw-r--r--  security/nss/automation/taskcluster/graph/src/image_builder.js | 13
-rw-r--r--  security/nss/automation/taskcluster/graph/src/queue.js | 5
-rw-r--r--  security/nss/automation/taskcluster/graph/src/try_syntax.js | 12
-rw-r--r--  security/nss/automation/taskcluster/image_builder/Dockerfile | 23
-rw-r--r--  security/nss/automation/taskcluster/image_builder/VERSION | 1
-rw-r--r--  security/nss/automation/taskcluster/image_builder/bin/checkout.sh | 15
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/build_gyp.sh | 9
-rw-r--r--  security/nss/automation/taskcluster/scripts/build_image.sh | 24
-rw-r--r--  security/nss/automation/taskcluster/scripts/check_abi.sh | 172
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/gen_certs.sh | 9
-rw-r--r--  security/nss/automation/taskcluster/scripts/run_hacl.sh | 40
-rw-r--r--  security/nss/automation/taskcluster/scripts/run_saw.sh | 9
-rwxr-xr-x  security/nss/automation/taskcluster/scripts/run_scan_build.sh | 2
-rw-r--r--  security/nss/automation/taskcluster/scripts/split.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/releng.manifest | 8
-rw-r--r--  security/nss/automation/taskcluster/windows/setup.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/setup32.sh | 6
-rw-r--r--  security/nss/automation/taskcluster/windows/setup64.sh | 6
48 files changed, 1922 insertions, 98 deletions
diff --git a/security/nss/automation/abi-check/expected-report-libssl3.so.txt b/security/nss/automation/abi-check/expected-report-libssl3.so.txt
index e69de29bb..ad818d0aa 100644
--- a/security/nss/automation/abi-check/expected-report-libssl3.so.txt
+++ b/security/nss/automation/abi-check/expected-report-libssl3.so.txt
@@ -0,0 +1,28 @@
+
+1 function with some indirect sub-type change:
+
+ [C]'function SECStatus SSL_GetChannelInfo(PRFileDesc*, SSLChannelInfo*, PRUintn)' at sslinfo.c:12:1 has some indirect sub-type changes:
+ parameter 2 of type 'SSLChannelInfo*' has sub-type changes:
+ in pointed to type 'typedef SSLChannelInfo' at sslt.h:318:1:
+ underlying type 'struct SSLChannelInfoStr' at sslt.h:251:1 changed:
+ type size hasn't changed
+ 1 data member change:
+ type of 'SSLSignatureScheme SSLChannelInfoStr::signatureScheme' changed:
+ underlying type 'enum __anonymous_enum__' at sslt.h:115:1 changed:
+ type size hasn't changed
+ 3 enumerator deletions:
+ '__anonymous_enum__::ssl_sig_rsa_pss_sha256' value '2052'
+ '__anonymous_enum__::ssl_sig_rsa_pss_sha384' value '2053'
+ '__anonymous_enum__::ssl_sig_rsa_pss_sha512' value '2054'
+
+ 6 enumerator insertions:
+ '__anonymous_enum__::ssl_sig_rsa_pss_rsae_sha256' value '2052'
+ '__anonymous_enum__::ssl_sig_rsa_pss_rsae_sha384' value '2053'
+ '__anonymous_enum__::ssl_sig_rsa_pss_rsae_sha512' value '2054'
+ '__anonymous_enum__::ssl_sig_rsa_pss_pss_sha256' value '2057'
+ '__anonymous_enum__::ssl_sig_rsa_pss_pss_sha384' value '2058'
+ '__anonymous_enum__::ssl_sig_rsa_pss_pss_sha512' value '2059'
+
+
+
+
diff --git a/security/nss/automation/abi-check/previous-nss-release b/security/nss/automation/abi-check/previous-nss-release
index b8d28cde0..c213ca3f8 100644
--- a/security/nss/automation/abi-check/previous-nss-release
+++ b/security/nss/automation/abi-check/previous-nss-release
@@ -1 +1 @@
-NSS_3_31_BRANCH
+NSS_3_35_BRANCH
diff --git a/security/nss/automation/buildbot-slave/build.sh b/security/nss/automation/buildbot-slave/build.sh
index 3fc914803..00e749672 100755
--- a/security/nss/automation/buildbot-slave/build.sh
+++ b/security/nss/automation/buildbot-slave/build.sh
@@ -212,7 +212,7 @@ test_nss()
RET=$?
print_log "######## details of detected failures (if any) ########"
- grep -B50 FAILED ${OUTPUTFILE}
+ grep -B50 -w FAILED ${OUTPUTFILE}
[ $? -eq 1 ] || RET=1
print_result "NSS - tests - ${BITS} bits - ${OPT}" ${RET} 0
@@ -236,11 +236,14 @@ check_abi()
BASE_NSPR=NSPR_$(head -1 ${HGDIR}/baseline/nss/automation/release/nspr-version.txt | cut -d . -f 1-2 | tr . _)_BRANCH
hg clone -u "${BASE_NSPR}" "${HGDIR}/nspr" "${HGDIR}/baseline/nspr"
if [ $? -ne 0 ]; then
- echo "invalid tag ${BASE_NSPR} derived from ${BASE_NSS} automation/release/nspr-version.txt"
- return 1
+        echo "nonexistent tag ${BASE_NSPR} derived from ${BASE_NSS} automation/release/nspr-version.txt"
+ # Assume that version hasn't been released yet, fall back to trunk
+ pushd "${HGDIR}/baseline/nspr"
+ hg update default
+ popd
fi
- print_log "######## building older NSPR/NSS ########"
+ print_log "######## building baseline NSPR/NSS ########"
pushd ${HGDIR}/baseline/nss
print_log "$ ${MAKE} ${NSS_BUILD_TARGET}"
@@ -253,26 +256,83 @@ check_abi()
fi
popd
+ ABI_PROBLEM_FOUND=0
ABI_REPORT=${OUTPUTDIR}/abi-diff.txt
rm -f ${ABI_REPORT}
PREVDIST=${HGDIR}/baseline/dist
NEWDIST=${HGDIR}/dist
ALL_SOs="libfreebl3.so libfreeblpriv3.so libnspr4.so libnss3.so libnssckbi.so libnssdbm3.so libnsssysinit.so libnssutil3.so libplc4.so libplds4.so libsmime3.so libsoftokn3.so libssl3.so"
for SO in ${ALL_SOs}; do
- if [ ! -f nss/automation/abi-check/expected-report-$SO.txt ]; then
- touch nss/automation/abi-check/expected-report-$SO.txt
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ touch ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt
fi
abidiff --hd1 $PREVDIST/public/ --hd2 $NEWDIST/public \
$PREVDIST/*/lib/$SO $NEWDIST/*/lib/$SO \
- > nss/automation/abi-check/new-report-$SO.txt
- diff -u nss/automation/abi-check/expected-report-$SO.txt \
- nss/automation/abi-check/new-report-$SO.txt >> ${ABI_REPORT}
+ > ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+ RET=$?
+ cat ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt \
+ | grep -v "^Functions changes summary:" \
+ | grep -v "^Variables changes summary:" \
+ > ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt
+ rm -f ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+ ABIDIFF_ERROR=$((($RET & 0x01) != 0))
+ ABIDIFF_USAGE_ERROR=$((($RET & 0x02) != 0))
+ ABIDIFF_ABI_CHANGE=$((($RET & 0x04) != 0))
+ ABIDIFF_ABI_INCOMPATIBLE_CHANGE=$((($RET & 0x08) != 0))
+ ABIDIFF_UNKNOWN_BIT_SET=$((($RET & 0xf0) != 0))
+
+ # If abidiff reports an error, or a usage error, or if it sets a result
+    # bit value this script doesn't yet know about, we'll report failure.
+ # For ABI changes, we don't yet report an error. We'll compare the
+ # result report with our whitelist. This allows us to silence changes
+    # that we're already aware of and that have been declared acceptable.
+
+ REPORT_RET_AS_FAILURE=0
+ if [ $ABIDIFF_ERROR -ne 0 ]; then
+ print_log "abidiff reported ABIDIFF_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_USAGE_ERROR -ne 0 ]; then
+ print_log "abidiff reported ABIDIFF_USAGE_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_UNKNOWN_BIT_SET -ne 0 ]; then
+ print_log "abidiff reported ABIDIFF_UNKNOWN_BIT_SET."
+ REPORT_RET_AS_FAILURE=1
+ fi
+
+ if [ $ABIDIFF_ABI_CHANGE -ne 0 ]; then
+        print_log "Ignoring abidiff result ABIDIFF_ABI_CHANGE, instead we'll check for non-whitelisted differences."
+ fi
+ if [ $ABIDIFF_ABI_INCOMPATIBLE_CHANGE -ne 0 ]; then
+ print_log "Ignoring abidiff result ABIDIFF_ABI_INCOMPATIBLE_CHANGE, instead we'll check for non-whitelisted differences."
+ fi
+
+ if [ $REPORT_RET_AS_FAILURE -ne 0 ]; then
+ ABI_PROBLEM_FOUND=1
+ print_log "abidiff {$PREVDIST , $NEWDIST} for $SO FAILED with result $RET, or failed writing to ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ ABI_PROBLEM_FOUND=1
+ print_log "FAILED to access report file: ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt"
+ fi
+
+ diff -wB -u ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt \
+ ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt >> ${ABI_REPORT}
+ if [ ! -f ${ABI_REPORT} ]; then
+ ABI_PROBLEM_FOUND=1
+        print_log "FAILED to compare expected and new report: ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
done
if [ -s ${ABI_REPORT} ]; then
print_log "FAILED: there are new unexpected ABI changes"
cat ${ABI_REPORT}
return 1
+ elif [ $ABI_PROBLEM_FOUND -ne 0 ]; then
+ print_log "FAILED: failure executing the ABI checks"
+ cat ${ABI_REPORT}
+ return 1
fi
return 0
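
For context on the check_abi() changes above: abidiff encodes its outcome as a bitmask in its exit status, and the script treats hard errors and unknown bits as failures while routing plain ABI changes through the whitelist comparison. A minimal sketch of that decoding, in Python for illustration only (the ABIDIFF_* values follow libabigail's documented constants; the classify() helper is hypothetical and not part of build.sh):

# Decode an abidiff exit status the way check_abi() above does.
ABIDIFF_ERROR                   = 0x01  # abidiff itself failed
ABIDIFF_USAGE_ERROR             = 0x02  # bad command-line usage
ABIDIFF_ABI_CHANGE              = 0x04  # ABI changed; compare against whitelist
ABIDIFF_ABI_INCOMPATIBLE_CHANGE = 0x08  # incompatible ABI change

def classify(ret):
    """Return (hard_failure, abi_change) for an abidiff exit status."""
    abi_change = bool(ret & (ABIDIFF_ABI_CHANGE | ABIDIFF_ABI_INCOMPATIBLE_CHANGE))
    # Anything other than the two ABI-change bits (errors, usage errors,
    # unknown result bits) is reported as a failure outright.
    hard_failure = bool(ret & ~(ABIDIFF_ABI_CHANGE | ABIDIFF_ABI_INCOMPATIBLE_CHANGE))
    return hard_failure, abi_change

assert classify(0x04) == (False, True)  # plain ABI change: diff against whitelist
assert classify(0x02) == (True, False)  # usage error: report failure directly
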
diff --git a/security/nss/automation/clang-format/run_clang_format.sh b/security/nss/automation/clang-format/run_clang_format.sh
index 2ba5ebeb1..378b00ff0 100644
--- a/security/nss/automation/clang-format/run_clang_format.sh
+++ b/security/nss/automation/clang-format/run_clang_format.sh
@@ -6,6 +6,8 @@ if [[ $(id -u) -eq 0 ]]; then
exec su worker -c "$0 $*"
fi
+set -e
+
# Apply clang-format on the provided folder and verify that this doesn't change any file.
# If any file differs after formatting, the script eventually exits with 1.
# Any differences between formatted and unformatted files are printed to stdout to give a hint about what's wrong.
@@ -21,17 +23,16 @@ blacklist=(
"./lib/zlib" \
"./lib/sqlite" \
"./gtests/google_test" \
- "./.hg" \
"./out" \
)
-top="$(dirname $0)/../.."
-cd "$top"
+top=$(cd "$(dirname $0)/../.."; pwd -P)
if [ $# -gt 0 ]; then
dirs=("$@")
else
- dirs=($(find . -maxdepth 2 -mindepth 1 -type d ! -path . \( ! -regex '.*/' \)))
+ cd "$top"
+ dirs=($(find . -maxdepth 2 -mindepth 1 -type d ! -path '*/.*' -print))
fi
format_folder()
@@ -46,20 +47,20 @@ format_folder()
}
for dir in "${dirs[@]}"; do
- if format_folder "$dir" ; then
+ if format_folder "$dir"; then
c="${dir//[^\/]}"
echo "formatting $dir ..."
- depth=""
+ depth=()
if [ "${#c}" == "1" ]; then
- depth="-maxdepth 1"
+ depth+=(-maxdepth 1)
fi
- find "$dir" $depth -type f \( -name '*.[ch]' -o -name '*.cc' \) -exec clang-format -i {} \+
+ find "$dir" "${depth[@]}" -type f \( -name '*.[ch]' -o -name '*.cc' \) -exec clang-format -i {} \+
fi
done
TMPFILE=$(mktemp /tmp/$(basename $0).XXXXXX)
-trap 'rm $TMPFILE' exit
-if (cd $(dirname $0); hg root >/dev/null 2>&1); then
+trap 'rm -f $TMPFILE' exit
+if [[ -d "$top/.hg" ]]; then
hg diff --git "$top" | tee $TMPFILE
else
git -C "$top" diff | tee $TMPFILE
diff --git a/security/nss/automation/clang-format/setup.sh b/security/nss/automation/clang-format/setup.sh
index 9b2480e90..beac9e905 100644
--- a/security/nss/automation/clang-format/setup.sh
+++ b/security/nss/automation/clang-format/setup.sh
@@ -17,8 +17,8 @@ apt_packages+=('locales')
apt-get install -y --no-install-recommends ${apt_packages[@]}
# Download clang.
-curl -L http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz -o clang.tar.xz
-curl -L http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig -o clang.tar.xz.sig
+curl -L https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz -o clang.tar.xz
+curl -L https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig -o clang.tar.xz.sig
# Verify the signature.
gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
gpg --verify clang.tar.xz.sig
diff --git a/security/nss/automation/release/nspr-version.txt b/security/nss/automation/release/nspr-version.txt
index 98783a615..701680d2c 100644
--- a/security/nss/automation/release/nspr-version.txt
+++ b/security/nss/automation/release/nspr-version.txt
@@ -1,4 +1,4 @@
-4.16
+4.19
# The first line of this file must contain the human readable NSPR
# version number, which is the minimum required version of NSPR
diff --git a/security/nss/automation/saw/bmul.cry b/security/nss/automation/saw/bmul.cry
new file mode 100644
index 000000000..87303dad6
--- /dev/null
+++ b/security/nss/automation/saw/bmul.cry
@@ -0,0 +1,8 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+bmul : {n,m} (fin n, n >= 1, m == n*2 - 1) => [n] -> [n] -> ([n], [n])
+bmul a b = (take`{n} prod, drop`{n} prod)
+ where prod = pad (pmult a b : [m])
+ pad x = zero # x
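
The Cryptol spec above defines bmul as a carry-less (GF(2) polynomial) multiplication whose 2n-bit product is split into high and low n-bit halves. A rough Python equivalent for the 32-bit case checked by bmul.saw below (bmul32 here is an illustrative reimplementation, not the freebl function itself):

# Carry-less multiply of two 32-bit values, returning (high, low) halves,
# mirroring the bmul spec above: partial products are XORed, so no carries.
def bmul32(a, b):
    prod = 0
    for i in range(32):
        if (b >> i) & 1:
            prod ^= a << i
    return (prod >> 32) & 0xffffffff, prod & 0xffffffff

assert bmul32(0x80000000, 0x00000003) == (0x1, 0x80000000)
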
diff --git a/security/nss/automation/saw/bmul.saw b/security/nss/automation/saw/bmul.saw
new file mode 100644
index 000000000..22cd2757b
--- /dev/null
+++ b/security/nss/automation/saw/bmul.saw
@@ -0,0 +1,26 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import "bmul.cry";
+
+print "Loading LLVM bitcode...";
+m <- llvm_load_module "../../../dist/Debug/lib/libfreeblpriv3.so.bc";
+
+let SpecBinaryMul n = do {
+ x <- llvm_var "x" (llvm_int n);
+ y <- llvm_var "y" (llvm_int n);
+ llvm_ptr "r_high" (llvm_int n);
+ r_high <- llvm_var "*r_high" (llvm_int n);
+ llvm_ptr "r_low" (llvm_int n);
+ r_low <- llvm_var "*r_low" (llvm_int n);
+
+ let res = {{ bmul x y }};
+ llvm_ensure_eq "*r_high" {{ res.0 }};
+ llvm_ensure_eq "*r_low" {{ res.1 }};
+
+ llvm_verify_tactic abc;
+};
+
+print "Proving equality for 32-bit bmul()...";
+time (llvm_verify m "bmul32" [] (SpecBinaryMul 32));
diff --git a/security/nss/automation/saw/chacha20.cry b/security/nss/automation/saw/chacha20.cry
new file mode 100644
index 000000000..0b52d51ad
--- /dev/null
+++ b/security/nss/automation/saw/chacha20.cry
@@ -0,0 +1,357 @@
+/*
+** ChaCha20 specification
+** Author: Austin Seipp <aseipp@pobox.com>. Released in the Public Domain.
+**
+** Based on RFC 7539 - "ChaCha20 and Poly1305 for IETF Protocols"
+** https://tools.ietf.org/html/rfc7539
+*/
+module chacha20 where
+
+/* -------------------------------------------------------------------------- */
+/* -- Implementation -------------------------------------------------------- */
+
+type Round = [16][32] // An input to the ChaCha20 core function
+type Block = [64][8] // An output block from the ChaCha20 core function.
+type Key = [32][8] // A 32-byte input key
+type Nonce = [12][8] // A 12-byte nonce
+type Counter = [32] // Starting block counter. Usually 1 or 0.
+
+/* ---------------------------------- */
+/* -- Quarter Round ----------------- */
+
+// The quarter round. This takes 4 32-bit integers and diffuses them
+// appropriately, and is the core of the column and diagonal round.
+qround : [4][32] -> [4][32]
+qround [ a0, b0, c0, d0 ] = [ a2, b4, c2, d4 ]
+ where
+ a1 = a0 + b0 /* a += b; d ^= a; d <<<= 16 */
+ d1 = d0 ^ a1
+ d2 = d1 <<< 16
+
+ c1 = c0 + d2 /* c += d; b ^= c; b <<<= 12 */
+ b1 = b0 ^ c1
+ b2 = b1 <<< 12
+
+ a2 = a1 + b2 /* a += b; d ^= a; d <<<= 8 */
+ d3 = d2 ^ a2
+ d4 = d3 <<< 8
+
+ c2 = c1 + d4 /* c += d; b ^= c; b <<<= 7 */
+ b3 = b2 ^ c2
+ b4 = b3 <<< 7
+
+
+/* ---------------------------------- */
+/* -- Column and diagonal rounds ---- */
+
+// Perform the column round, followed by the diagonal round on the
+// input state, which are both defined in terms of the quarter
+// round. ChaCha20 requires 20 total rounds of interleaving
+// column/diagonal passes on the state, and therefore `cdround` actually
+// does two passes at once (mostly for simplicity).
+cdround : Round -> Round
+cdround [ x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15 ]
+ = [ z0, z1, z2, z3, z4, z5, z6, z7, z8, z9, z10, z11, z12, z13, z14, z15 ]
+ where
+ // Column round
+ [ y0, y4, y8, y12 ] = qround [ x0, x4, x8, x12 ]
+ [ y1, y5, y9, y13 ] = qround [ x1, x5, x9, x13 ]
+ [ y2, y6, y10, y14 ] = qround [ x2, x6, x10, x14 ]
+ [ y3, y7, y11, y15 ] = qround [ x3, x7, x11, x15 ]
+
+ // Diagonal round
+ [ z0, z5, z10, z15 ] = qround [ y0, y5, y10, y15 ]
+ [ z1, z6, z11, z12 ] = qround [ y1, y6, y11, y12 ]
+ [ z2, z7, z8, z13 ] = qround [ y2, y7, y8, y13 ]
+ [ z3, z4, z9, z14 ] = qround [ y3, y4, y9, y14 ]
+
+
+/* ---------------------------------- */
+/* -- Block encryption -------------- */
+
+// Given an input round, calculate the core ChaCha20 algorithm over
+// the round and return an output block. These output blocks form the
+// stream which you XOR your plaintext with, and successive iterations of
+// the core algorithm result in an infinite stream you can use as a
+// cipher.
+core : Round -> Block
+core x = block
+ where
+ rounds = iterate cdround x // Do a bunch of column/diagonal passes...
+ result = rounds @ 10 // And grab the 10th result (20 total passes)
+ block = blocked (x + result) // Add to input, convert to output block
+
+
+/* ---------------------------------- */
+/* -- Key Expansion ----------------- */
+
+// Key expansion. Given a nonce and a key, compute a round (which is
+// fed to the core algorithm above) by taking the initial round state and
+// mixing in the key and nonce appropriately.
+kexp : Key -> Counter -> Nonce -> Round
+kexp k c n = [ c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11, c12, c13, c14, c15 ]
+ where
+ // The following describes the layout of the output round, which
+ // is fed into the core algorithm successively.
+
+    // Words 0-3: Constants
+ [ c0, c1, c2, c3 ] = [ 0x61707865, 0x3320646e, 0x79622d32, 0x6b206574 ]
+
+    // Words 4-11: Key
+ [ c4, c5, c6, c7 ] = map rjoin (groupBy`{4} kslice1 : [4][4][8]) : [4][32]
+ [ c8, c9, c10, c11 ] = map rjoin (groupBy`{4} kslice2 : [4][4][8]) : [4][32]
+ kslice1 = k @@ ([ 0 .. 15 ] : [16][32]) // Top half
+ kslice2 = k @@ ([ 16 .. 31 ] : [16][32]) // Bottom half
+
+    // Word 12: Counter, starts off with whatever the user specified
+ // (usually 0 or 1)
+ [ c12 ] = [ c ]
+
+    // Words 13-15: Nonce
+ [ c13, c14, c15 ] = map rjoin (groupBy`{4} n)
+
+
+/* ---------------------------------- */
+/* -- Round increments -------------- */
+
+// Take a given number of iterations and the input round (after key
+// expansion!), and calculate the input round for the core algorithm
+// function. This allows you to index into a particular Round which
+// can be passed to the 'core' function.
+iround : [64] -> Round -> Round
+iround n r = (iterate once r) @ n where
+ // Given a round, increment the counter inside (index no 12)
+ once [ x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15 ]
+ = [ x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12+1, x13, x14, x15 ]
+
+/* ---------------------------------- */
+/* -- ChaCha20 encryption ----------- */
+
+// Produce a pseudo-random stream given a nonce and a key, which can
+// be XOR'd with your data to encrypt it.
+stream : {n} (fin n) => Key -> Counter -> Nonce -> [n][8]
+stream k c n = take`{n} (join rounds) // Take n bytes from the final result
+ where
+ // Expand key
+ key = kexp k c n
+
+ // Produce the stream by successively incrementing the input round
+ // by `i`, and running the core algorithm to get the resulting
+ // stream for the `i`th input. Once these are concatenated, you have
+ // an infinite list representing the ChaCha20 stream.
+ rounds = [ core (iround i key) | i <- [ 0, 1 ... ] ]
+
+
+// Given a message, a nonce, and a key, produce an encrypted
+// message. This is simply defined as the XOR of the message and the
+// corresponding encryption stream.
+encrypt : {n} (fin n) => Key -> Counter -> Nonce -> [n][8] -> [n][8]
+encrypt k c n m = m ^ (stream k c n)
+
+/* -------------------------------------------------------------------------- */
+/* -- Theorems, tests ------------------------------------------------------- */
+
+// Tests are private
+private
+ qround01 = qround in == out
+ where
+ in = [ 0x11111111, 0x01020304, 0x9b8d6f43, 0x01234567 ]
+ out = [ 0xea2a92f4, 0xcb1cf8ce, 0x4581472e, 0x5881c4bb ]
+
+ core01 = kexp k 1 n == out
+ where
+ n = [ 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a,
+ 0x00, 0x00, 0x00, 0x00 ]
+ k = [ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+ 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f ]
+ out = [ 0x61707865, 0x3320646e, 0x79622d32, 0x6b206574,
+ 0x03020100, 0x07060504, 0x0b0a0908, 0x0f0e0d0c,
+ 0x13121110, 0x17161514, 0x1b1a1918, 0x1f1e1d1c,
+ 0x00000001, 0x09000000, 0x4a000000, 0x00000000 ]
+
+ core02 = core (kexp k 1 n) == out
+ where
+ n = [ 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x4a,
+ 0x00, 0x00, 0x00, 0x00 ]
+ k = [ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
+ 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
+ 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
+ 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f ]
+ out = [ 0x10, 0xf1, 0xe7, 0xe4, 0xd1, 0x3b, 0x59, 0x15,
+ 0x50, 0x0f, 0xdd, 0x1f, 0xa3, 0x20, 0x71, 0xc4,
+ 0xc7, 0xd1, 0xf4, 0xc7, 0x33, 0xc0, 0x68, 0x03,
+ 0x04, 0x22, 0xaa, 0x9a, 0xc3, 0xd4, 0x6c, 0x4e,
+ 0xd2, 0x82, 0x64, 0x46, 0x07, 0x9f, 0xaa, 0x09,
+ 0x14, 0xc2, 0xd7, 0x05, 0xd9, 0x8b, 0x02, 0xa2,
+ 0xb5, 0x12, 0x9c, 0xd1, 0xde, 0x16, 0x4e, 0xb9,
+ 0xcb, 0xd0, 0x83, 0xe8, 0xa2, 0x50, 0x3c, 0x4e ]
+
+ rfctest01 = encrypt zero zero zero zero
+ == [ 0x76, 0xb8, 0xe0, 0xad, 0xa0, 0xf1, 0x3d, 0x90, 0x40, 0x5d,
+ 0x6a, 0xe5, 0x53, 0x86, 0xbd, 0x28, 0xbd, 0xd2, 0x19, 0xb8,
+ 0xa0, 0x8d, 0xed, 0x1a, 0xa8, 0x36, 0xef, 0xcc, 0x8b, 0x77,
+ 0x0d, 0xc7, 0xda, 0x41, 0x59, 0x7c, 0x51, 0x57, 0x48, 0x8d,
+ 0x77, 0x24, 0xe0, 0x3f, 0xb8, 0xd8, 0x4a, 0x37, 0x6a, 0x43,
+ 0xb8, 0xf4, 0x15, 0x18, 0xa1, 0x1c, 0xc3, 0x87, 0xb6, 0x69,
+ 0xb2, 0xee, 0x65, 0x86 ]
+
+ rfctest02 = encrypt (zero # [1]) 1 (zero # [2]) msg == out
+ where
+ out = [ 0xa3, 0xfb, 0xf0, 0x7d, 0xf3, 0xfa, 0x2f, 0xde, 0x4f, 0x37,
+ 0x6c, 0xa2, 0x3e, 0x82, 0x73, 0x70, 0x41, 0x60, 0x5d, 0x9f,
+ 0x4f, 0x4f, 0x57, 0xbd, 0x8c, 0xff, 0x2c, 0x1d, 0x4b, 0x79,
+ 0x55, 0xec, 0x2a, 0x97, 0x94, 0x8b, 0xd3, 0x72, 0x29, 0x15,
+ 0xc8, 0xf3, 0xd3, 0x37, 0xf7, 0xd3, 0x70, 0x05, 0x0e, 0x9e,
+ 0x96, 0xd6, 0x47, 0xb7, 0xc3, 0x9f, 0x56, 0xe0, 0x31, 0xca,
+ 0x5e, 0xb6, 0x25, 0x0d, 0x40, 0x42, 0xe0, 0x27, 0x85, 0xec,
+ 0xec, 0xfa, 0x4b, 0x4b, 0xb5, 0xe8, 0xea, 0xd0, 0x44, 0x0e,
+ 0x20, 0xb6, 0xe8, 0xdb, 0x09, 0xd8, 0x81, 0xa7, 0xc6, 0x13,
+ 0x2f, 0x42, 0x0e, 0x52, 0x79, 0x50, 0x42, 0xbd, 0xfa, 0x77,
+ 0x73, 0xd8, 0xa9, 0x05, 0x14, 0x47, 0xb3, 0x29, 0x1c, 0xe1,
+ 0x41, 0x1c, 0x68, 0x04, 0x65, 0x55, 0x2a, 0xa6, 0xc4, 0x05,
+ 0xb7, 0x76, 0x4d, 0x5e, 0x87, 0xbe, 0xa8, 0x5a, 0xd0, 0x0f,
+ 0x84, 0x49, 0xed, 0x8f, 0x72, 0xd0, 0xd6, 0x62, 0xab, 0x05,
+ 0x26, 0x91, 0xca, 0x66, 0x42, 0x4b, 0xc8, 0x6d, 0x2d, 0xf8,
+ 0x0e, 0xa4, 0x1f, 0x43, 0xab, 0xf9, 0x37, 0xd3, 0x25, 0x9d,
+ 0xc4, 0xb2, 0xd0, 0xdf, 0xb4, 0x8a, 0x6c, 0x91, 0x39, 0xdd,
+ 0xd7, 0xf7, 0x69, 0x66, 0xe9, 0x28, 0xe6, 0x35, 0x55, 0x3b,
+ 0xa7, 0x6c, 0x5c, 0x87, 0x9d, 0x7b, 0x35, 0xd4, 0x9e, 0xb2,
+ 0xe6, 0x2b, 0x08, 0x71, 0xcd, 0xac, 0x63, 0x89, 0x39, 0xe2,
+ 0x5e, 0x8a, 0x1e, 0x0e, 0xf9, 0xd5, 0x28, 0x0f, 0xa8, 0xca,
+ 0x32, 0x8b, 0x35, 0x1c, 0x3c, 0x76, 0x59, 0x89, 0xcb, 0xcf,
+ 0x3d, 0xaa, 0x8b, 0x6c, 0xcc, 0x3a, 0xaf, 0x9f, 0x39, 0x79,
+ 0xc9, 0x2b, 0x37, 0x20, 0xfc, 0x88, 0xdc, 0x95, 0xed, 0x84,
+ 0xa1, 0xbe, 0x05, 0x9c, 0x64, 0x99, 0xb9, 0xfd, 0xa2, 0x36,
+ 0xe7, 0xe8, 0x18, 0xb0, 0x4b, 0x0b, 0xc3, 0x9c, 0x1e, 0x87,
+ 0x6b, 0x19, 0x3b, 0xfe, 0x55, 0x69, 0x75, 0x3f, 0x88, 0x12,
+ 0x8c, 0xc0, 0x8a, 0xaa, 0x9b, 0x63, 0xd1, 0xa1, 0x6f, 0x80,
+ 0xef, 0x25, 0x54, 0xd7, 0x18, 0x9c, 0x41, 0x1f, 0x58, 0x69,
+ 0xca, 0x52, 0xc5, 0xb8, 0x3f, 0xa3, 0x6f, 0xf2, 0x16, 0xb9,
+ 0xc1, 0xd3, 0x00, 0x62, 0xbe, 0xbc, 0xfd, 0x2d, 0xc5, 0xbc,
+ 0xe0, 0x91, 0x19, 0x34, 0xfd, 0xa7, 0x9a, 0x86, 0xf6, 0xe6,
+ 0x98, 0xce, 0xd7, 0x59, 0xc3, 0xff, 0x9b, 0x64, 0x77, 0x33,
+ 0x8f, 0x3d, 0xa4, 0xf9, 0xcd, 0x85, 0x14, 0xea, 0x99, 0x82,
+ 0xcc, 0xaf, 0xb3, 0x41, 0xb2, 0x38, 0x4d, 0xd9, 0x02, 0xf3,
+ 0xd1, 0xab, 0x7a, 0xc6, 0x1d, 0xd2, 0x9c, 0x6f, 0x21, 0xba,
+ 0x5b, 0x86, 0x2f, 0x37, 0x30, 0xe3, 0x7c, 0xfd, 0xc4, 0xfd,
+ 0x80, 0x6c, 0x22, 0xf2, 0x21 ]
+
+ msg = [ 0x41, 0x6e, 0x79, 0x20, 0x73, 0x75, 0x62, 0x6d, 0x69, 0x73,
+ 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x74, 0x6f, 0x20, 0x74, 0x68,
+ 0x65, 0x20, 0x49, 0x45, 0x54, 0x46, 0x20, 0x69, 0x6e, 0x74,
+ 0x65, 0x6e, 0x64, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x69, 0x62,
+ 0x75, 0x74, 0x6f, 0x72, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x70,
+ 0x75, 0x62, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
+ 0x20, 0x61, 0x73, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x6f, 0x72,
+ 0x20, 0x70, 0x61, 0x72, 0x74, 0x20, 0x6f, 0x66, 0x20, 0x61,
+ 0x6e, 0x20, 0x49, 0x45, 0x54, 0x46, 0x20, 0x49, 0x6e, 0x74,
+ 0x65, 0x72, 0x6e, 0x65, 0x74, 0x2d, 0x44, 0x72, 0x61, 0x66,
+ 0x74, 0x20, 0x6f, 0x72, 0x20, 0x52, 0x46, 0x43, 0x20, 0x61,
+ 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x73, 0x74, 0x61,
+ 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x20, 0x6d, 0x61, 0x64,
+ 0x65, 0x20, 0x77, 0x69, 0x74, 0x68, 0x69, 0x6e, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74,
+ 0x20, 0x6f, 0x66, 0x20, 0x61, 0x6e, 0x20, 0x49, 0x45, 0x54,
+ 0x46, 0x20, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79,
+ 0x20, 0x69, 0x73, 0x20, 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64,
+ 0x65, 0x72, 0x65, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x22, 0x49,
+ 0x45, 0x54, 0x46, 0x20, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x69,
+ 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2e, 0x20, 0x53,
+ 0x75, 0x63, 0x68, 0x20, 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d,
+ 0x65, 0x6e, 0x74, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6c, 0x75,
+ 0x64, 0x65, 0x20, 0x6f, 0x72, 0x61, 0x6c, 0x20, 0x73, 0x74,
+ 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x69,
+ 0x6e, 0x20, 0x49, 0x45, 0x54, 0x46, 0x20, 0x73, 0x65, 0x73,
+ 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x2c, 0x20, 0x61, 0x73, 0x20,
+ 0x77, 0x65, 0x6c, 0x6c, 0x20, 0x61, 0x73, 0x20, 0x77, 0x72,
+ 0x69, 0x74, 0x74, 0x65, 0x6e, 0x20, 0x61, 0x6e, 0x64, 0x20,
+ 0x65, 0x6c, 0x65, 0x63, 0x74, 0x72, 0x6f, 0x6e, 0x69, 0x63,
+ 0x20, 0x63, 0x6f, 0x6d, 0x6d, 0x75, 0x6e, 0x69, 0x63, 0x61,
+ 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x20, 0x6d, 0x61, 0x64, 0x65,
+ 0x20, 0x61, 0x74, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x74, 0x69,
+ 0x6d, 0x65, 0x20, 0x6f, 0x72, 0x20, 0x70, 0x6c, 0x61, 0x63,
+ 0x65, 0x2c, 0x20, 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x61,
+ 0x72, 0x65, 0x20, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73,
+ 0x65, 0x64, 0x20, 0x74, 0x6f ]
+
+ rfctest03 = encrypt key 42 (zero # [2]) msg == out
+ where
+ key = [ 0x1c, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a, 0xf3, 0x33,
+ 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0, 0x47, 0x39, 0x17, 0xc1,
+ 0x40, 0x2b, 0x80, 0x09, 0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70,
+ 0x75, 0xc0 ]
+ out = [ 0x27, 0x54, 0x77, 0x61, 0x73, 0x20, 0x62, 0x72, 0x69, 0x6c,
+ 0x6c, 0x69, 0x67, 0x2c, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x73, 0x6c, 0x69, 0x74, 0x68, 0x79, 0x20,
+ 0x74, 0x6f, 0x76, 0x65, 0x73, 0x0a, 0x44, 0x69, 0x64, 0x20,
+ 0x67, 0x79, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x64, 0x20, 0x67,
+ 0x69, 0x6d, 0x62, 0x6c, 0x65, 0x20, 0x69, 0x6e, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x77, 0x61, 0x62, 0x65, 0x3a, 0x0a, 0x41,
+ 0x6c, 0x6c, 0x20, 0x6d, 0x69, 0x6d, 0x73, 0x79, 0x20, 0x77,
+ 0x65, 0x72, 0x65, 0x20, 0x74, 0x68, 0x65, 0x20, 0x62, 0x6f,
+ 0x72, 0x6f, 0x67, 0x6f, 0x76, 0x65, 0x73, 0x2c, 0x0a, 0x41,
+ 0x6e, 0x64, 0x20, 0x74, 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x6d,
+ 0x65, 0x20, 0x72, 0x61, 0x74, 0x68, 0x73, 0x20, 0x6f, 0x75,
+ 0x74, 0x67, 0x72, 0x61, 0x62, 0x65, 0x2e ]
+
+ msg = [ 0x62, 0xe6, 0x34, 0x7f, 0x95, 0xed, 0x87, 0xa4, 0x5f, 0xfa,
+ 0xe7, 0x42, 0x6f, 0x27, 0xa1, 0xdf, 0x5f, 0xb6, 0x91, 0x10,
+ 0x04, 0x4c, 0x0d, 0x73, 0x11, 0x8e, 0xff, 0xa9, 0x5b, 0x01,
+ 0xe5, 0xcf, 0x16, 0x6d, 0x3d, 0xf2, 0xd7, 0x21, 0xca, 0xf9,
+ 0xb2, 0x1e, 0x5f, 0xb1, 0x4c, 0x61, 0x68, 0x71, 0xfd, 0x84,
+ 0xc5, 0x4f, 0x9d, 0x65, 0xb2, 0x83, 0x19, 0x6c, 0x7f, 0xe4,
+ 0xf6, 0x05, 0x53, 0xeb, 0xf3, 0x9c, 0x64, 0x02, 0xc4, 0x22,
+ 0x34, 0xe3, 0x2a, 0x35, 0x6b, 0x3e, 0x76, 0x43, 0x12, 0xa6,
+ 0x1a, 0x55, 0x32, 0x05, 0x57, 0x16, 0xea, 0xd6, 0x96, 0x25,
+ 0x68, 0xf8, 0x7d, 0x3f, 0x3f, 0x77, 0x04, 0xc6, 0xa8, 0xd1,
+ 0xbc, 0xd1, 0xbf, 0x4d, 0x50, 0xd6, 0x15, 0x4b, 0x6d, 0xa7,
+ 0x31, 0xb1, 0x87, 0xb5, 0x8d, 0xfd, 0x72, 0x8a, 0xfa, 0x36,
+ 0x75, 0x7a, 0x79, 0x7a, 0xc1, 0x88, 0xd1 ]
+
+property allTestsPass =
+ ([ // Basic tests
+ qround01, core01, core02
+ // Full RFC test vectors
+ , rfctest01, rfctest02, rfctest03
+ ] : [_]Bit) == ~zero // All test bits should equal one
+
+/* -------------------------------------------------------------------------- */
+/* -- Private utilities ----------------------------------------------------- */
+
+private
+ // Convert a round into a block, by splitting every 32-bit round entry
+ // into 4 bytes, and then serialize those values into a full block.
+ blocked : Round -> Block
+ blocked x = join (map toBytes x)
+ where
+ // This essentially splits a 32-bit number into 4-byte
+ // little-endian form, where 'rjoin' is the inverse and would merge
+ // 4 bytes as a 32-bit little endian number.
+ toBytes : [32] -> [4][8]
+ toBytes v = reverse (groupBy`{8} v)
+
+ // Map a function over a finite list.
+ map : { a, b, c }
+ (a -> b) -> [c]a -> [c]b
+ map f xs = [ f x | x <- xs ]
+
+ // Map a function iteratively over a seed value, producing an infinite
+ // list of successive function applications:
+ //
+ // iterate f 0 == [ 0, f 0, f (f 0), f (f (f 0)), ... ]
+ iterate : { a } (a -> a) -> a -> [inf]a
+ iterate f x = [x] # [ f v | v <- iterate f x ]
+ where
+      // NB: Needs a bound name in order to tie the recursive knot.
+ xs = [x] # [ f v | v <- xs ]
+
+ // rjoin = join . reverse
+ // This encodes a sequence of values as a little endian number
+ // e.g. [ 0xaa, 0xbb, 0xcc, 0xdd ] is serialized as \xdd\xcc\xbb\xaa
+ rjoin : {a, b, c}
+ ( fin a, fin c
+ ) => [c][a]b -> [a * c]b
+ rjoin x = join (reverse x)
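
The quarter round that qround above specifies (and that the qround01 test pins down) can be sketched in Python as follows; rotl is a small illustrative helper, and the test vector is the one from RFC 7539 section 2.1.1 already used in the spec:

def rotl(x, n):
    # 32-bit left rotation.
    return ((x << n) | (x >> (32 - n))) & 0xffffffff

def qround(a, b, c, d):
    a = (a + b) & 0xffffffff; d = rotl(d ^ a, 16)
    c = (c + d) & 0xffffffff; b = rotl(b ^ c, 12)
    a = (a + b) & 0xffffffff; d = rotl(d ^ a, 8)
    c = (c + d) & 0xffffffff; b = rotl(b ^ c, 7)
    return a, b, c, d

# Same vector as qround01 in chacha20.cry above (RFC 7539, section 2.1.1).
assert qround(0x11111111, 0x01020304, 0x9b8d6f43, 0x01234567) == \
    (0xea2a92f4, 0xcb1cf8ce, 0x4581472e, 0x5881c4bb)
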
diff --git a/security/nss/automation/saw/chacha20.saw b/security/nss/automation/saw/chacha20.saw
new file mode 100644
index 000000000..92145ab74
--- /dev/null
+++ b/security/nss/automation/saw/chacha20.saw
@@ -0,0 +1,40 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import "chacha20.cry" as chacha20;
+
+print "Proving ChaCha20 spec...";
+prove_print abc {{ chacha20::allTestsPass }};
+
+print "Loading LLVM bitcode...";
+m <- llvm_load_module "../../../dist/Debug/lib/libfreeblpriv3.so.bc";
+
+let SpecChaCha20 n = do {
+ llvm_ptr "output" (llvm_array n (llvm_int 8));
+ output <- llvm_var "*output" (llvm_array n (llvm_int 8));
+
+ llvm_ptr "plain" (llvm_array n (llvm_int 8));
+ plain <- llvm_var "*plain" (llvm_array n (llvm_int 8));
+
+ len <- llvm_var "len" (llvm_int 32);
+ llvm_assert_eq "len" {{ `n : [32] }};
+
+ llvm_ptr "k" (llvm_array 32 (llvm_int 8));
+ k <- llvm_var "*k" (llvm_array 32 (llvm_int 8));
+
+ llvm_ptr "n1" (llvm_array 12 (llvm_int 8));
+ n1 <- llvm_var "*n1" (llvm_array 12 (llvm_int 8));
+
+ ctr <- llvm_var "ctr" (llvm_int 32);
+
+ llvm_ensure_eq "*output" {{ chacha20::encrypt k ctr n1 plain }};
+
+ llvm_verify_tactic abc;
+};
+
+print "Proving equality for a single block...";
+time (llvm_verify m "Hacl_Chacha20_chacha20" [] (SpecChaCha20 64));
+
+print "Proving equality for multiple blocks...";
+time (llvm_verify m "Hacl_Chacha20_chacha20" [] (SpecChaCha20 256));
diff --git a/security/nss/automation/saw/poly1305-hacl.saw b/security/nss/automation/saw/poly1305-hacl.saw
new file mode 100644
index 000000000..a4dfff6d9
--- /dev/null
+++ b/security/nss/automation/saw/poly1305-hacl.saw
@@ -0,0 +1,38 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import "poly1305.cry" as poly1305;
+
+print "Proving Poly1305 spec...";
+prove_print abc {{ poly1305::allTestsPass }};
+
+print "Loading LLVM bitcode...";
+m <- llvm_load_module "../../../dist/Debug/lib/libfreeblpriv3.so.bc";
+
+let SpecPoly1305 n = do {
+ llvm_ptr "output" (llvm_array 16 (llvm_int 8));
+ output <- llvm_var "*output" (llvm_array 16 (llvm_int 8));
+
+ llvm_ptr "input" (llvm_array n (llvm_int 8));
+ input <- llvm_var "*input" (llvm_array n (llvm_int 8));
+
+ llvm_var "len1" (llvm_int 64);
+ llvm_ptr "k1" (llvm_array 32 (llvm_int 8));
+ k1 <- llvm_var "*k1" (llvm_array 32 (llvm_int 8));
+
+ llvm_assert_eq "*input" {{ zero : [n][8] }};
+ llvm_assert_eq "len1" {{ `n : [64] }};
+
+ llvm_assert_eq "*k1" {{ zero : [32][8] }};
+
+ let res = {{ poly1305::Poly1305 input (take`{16} k1) (drop`{16} k1) }};
+ llvm_ensure_eq "*output" {{ res }};
+
+ llvm_verify_tactic abc;
+};
+
+print "Proving equality for a single block...";
+// This is currently disabled as it takes way too long. We need to help Z3
+// prove this before we can enable it on Taskcluster.
+//time (llvm_verify m "Hacl_Poly1305_64_crypto_onetimeauth" [] (SpecPoly1305 16));
diff --git a/security/nss/automation/saw/poly1305.cry b/security/nss/automation/saw/poly1305.cry
new file mode 100644
index 000000000..6321a4f19
--- /dev/null
+++ b/security/nss/automation/saw/poly1305.cry
@@ -0,0 +1,336 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/* This file provides a spec of the Poly1305 one-time authenticator.
+ * See <https://tools.ietf.org/html/rfc7539> for details. */
+
+module poly1305 where
+
+P : [136]
+P = 2^^130 - 5
+
+Poly1305 : {n} (fin n) => [n][8] -> [16][8] -> [16][8] -> [16][8]
+Poly1305 msg r s = reverse (groupBy (drop ((rounds ! 0) + s')))
+ where
+ rounds = [zero] # [ Poly1305_block acc r' b | b <- blocks | acc <- rounds ]
+ r' = zero # (Poly1305_clamp (join (reverse r)))
+ s' = zero # (join (reverse s))
+ blocks = Poly1305_split msg
+
+private
+ // 0x0f - for r[3], r[7], r[11], r[15]
+ // 0xfc - for r[4], r[8], r[12]
+ Poly1305_clamp r = r && 0x0ffffffc0ffffffc0ffffffc0fffffff
+
+ // Poly1305_block : ((acc + msg) * r) % P
+ Poly1305_block : [136] -> [136] -> [136] -> [136]
+ Poly1305_block acc r msg = drop (prod % (zero # P))
+ where
+ acc' : [137]
+ // Add the current block to the accumulator.
+ acc' = (zero # acc) + (zero # msg)
+ prod : [273]
+ // Multiply the new accumulator value by r.
+ prod = ((zero : [137]) # r) * ((zero : [136]) # acc')
+
+ Poly1305_split : {n, nb, nf} (fin n, nf == n / 16, nb == (n + 15) / 16) => [n][8] -> [nb][136]
+ Poly1305_split msg = take ((h1 : [nf][136]) # h2)
+ where
+ // Split all full 16-byte blocks and append 0x01, then convert to LE.
+ h1 = [ join (reverse (b # [0x01])) | b <- groupBy`{16} (take msg)]
+ // Pad the remaining bytes (if any) and convert to LE.
+ h2 = [join (reverse ((drop`{nf * 16} msg) # [0x01] # zero))]
+
+/* -------------------------------------------------------------------------- */
+/* -- Tests ----------------------------------------------------------------- */
+
+private
+ // https://tools.ietf.org/html/rfc7539#section-2.5.2
+ rval1 = [0x85, 0xd6, 0xbe, 0x78, 0x57, 0x55, 0x6d, 0x33,
+ 0x7f, 0x44, 0x52, 0xfe, 0x42, 0xd5, 0x06, 0xa8]
+ sval1 = [0x01, 0x03, 0x80, 0x8a, 0xfb, 0x0d, 0xb2, 0xfd,
+ 0x4a, 0xbf, 0xf6, 0xaf, 0x41, 0x49, 0xf5, 0x1b]
+ text1 = [0x43, 0x72, 0x79, 0x70, 0x74, 0x6f, 0x67, 0x72,
+ 0x61, 0x70, 0x68, 0x69, 0x63, 0x20, 0x46, 0x6f,
+ 0x72, 0x75, 0x6d, 0x20, 0x52, 0x65, 0x73, 0x65,
+ 0x61, 0x72, 0x63, 0x68, 0x20, 0x47, 0x72, 0x6f,
+ 0x75, 0x70]
+
+ rfctest01 = Poly1305 text1 rval1 sval1
+ == [0xa8, 0x06, 0x1d, 0xc1, 0x30, 0x51, 0x36, 0xc6,
+ 0xc2, 0x2b, 0x8b, 0xaf, 0x0c, 0x01, 0x27, 0xa9]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #1
+ rval2 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval2 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text2 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ rfctest02 = Poly1305 text2 rval2 sval2
+ == [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #2
+ rval3 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval3 = [0x36, 0xe5, 0xf6, 0xb5, 0xc5, 0xe0, 0x60, 0x70,
+ 0xf0, 0xef, 0xca, 0x96, 0x22, 0x7a, 0x86, 0x3e]
+ text3 = [0x41, 0x6e, 0x79, 0x20, 0x73, 0x75, 0x62, 0x6d,
+ 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x74,
+ 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x49, 0x45,
+ 0x54, 0x46, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6e,
+ 0x64, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x43, 0x6f, 0x6e, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x6f, 0x72, 0x20, 0x66,
+ 0x6f, 0x72, 0x20, 0x70, 0x75, 0x62, 0x6c, 0x69,
+ 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61,
+ 0x73, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x6f, 0x72,
+ 0x20, 0x70, 0x61, 0x72, 0x74, 0x20, 0x6f, 0x66,
+ 0x20, 0x61, 0x6e, 0x20, 0x49, 0x45, 0x54, 0x46,
+ 0x20, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65,
+ 0x74, 0x2d, 0x44, 0x72, 0x61, 0x66, 0x74, 0x20,
+ 0x6f, 0x72, 0x20, 0x52, 0x46, 0x43, 0x20, 0x61,
+ 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74,
+ 0x20, 0x6d, 0x61, 0x64, 0x65, 0x20, 0x77, 0x69,
+ 0x74, 0x68, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65,
+ 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74,
+ 0x20, 0x6f, 0x66, 0x20, 0x61, 0x6e, 0x20, 0x49,
+ 0x45, 0x54, 0x46, 0x20, 0x61, 0x63, 0x74, 0x69,
+ 0x76, 0x69, 0x74, 0x79, 0x20, 0x69, 0x73, 0x20,
+ 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72,
+ 0x65, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x22, 0x49,
+ 0x45, 0x54, 0x46, 0x20, 0x43, 0x6f, 0x6e, 0x74,
+ 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e,
+ 0x22, 0x2e, 0x20, 0x53, 0x75, 0x63, 0x68, 0x20,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e,
+ 0x74, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6c, 0x75,
+ 0x64, 0x65, 0x20, 0x6f, 0x72, 0x61, 0x6c, 0x20,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e,
+ 0x74, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x49, 0x45,
+ 0x54, 0x46, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69,
+ 0x6f, 0x6e, 0x73, 0x2c, 0x20, 0x61, 0x73, 0x20,
+ 0x77, 0x65, 0x6c, 0x6c, 0x20, 0x61, 0x73, 0x20,
+ 0x77, 0x72, 0x69, 0x74, 0x74, 0x65, 0x6e, 0x20,
+ 0x61, 0x6e, 0x64, 0x20, 0x65, 0x6c, 0x65, 0x63,
+ 0x74, 0x72, 0x6f, 0x6e, 0x69, 0x63, 0x20, 0x63,
+ 0x6f, 0x6d, 0x6d, 0x75, 0x6e, 0x69, 0x63, 0x61,
+ 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x20, 0x6d, 0x61,
+ 0x64, 0x65, 0x20, 0x61, 0x74, 0x20, 0x61, 0x6e,
+ 0x79, 0x20, 0x74, 0x69, 0x6d, 0x65, 0x20, 0x6f,
+ 0x72, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x2c,
+ 0x20, 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x61,
+ 0x72, 0x65, 0x20, 0x61, 0x64, 0x64, 0x72, 0x65,
+ 0x73, 0x73, 0x65, 0x64, 0x20, 0x74, 0x6f]
+
+ rfctest03 = Poly1305 text3 rval3 sval3
+ == [0x36, 0xe5, 0xf6, 0xb5, 0xc5, 0xe0, 0x60, 0x70,
+ 0xf0, 0xef, 0xca, 0x96, 0x22, 0x7a, 0x86, 0x3e]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #3
+ rval4 = [0x36, 0xe5, 0xf6, 0xb5, 0xc5, 0xe0, 0x60, 0x70,
+ 0xf0, 0xef, 0xca, 0x96, 0x22, 0x7a, 0x86, 0x3e]
+ sval4 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text4 = [0x41, 0x6e, 0x79, 0x20, 0x73, 0x75, 0x62, 0x6d,
+ 0x69, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x74,
+ 0x6f, 0x20, 0x74, 0x68, 0x65, 0x20, 0x49, 0x45,
+ 0x54, 0x46, 0x20, 0x69, 0x6e, 0x74, 0x65, 0x6e,
+ 0x64, 0x65, 0x64, 0x20, 0x62, 0x79, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x43, 0x6f, 0x6e, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x6f, 0x72, 0x20, 0x66,
+ 0x6f, 0x72, 0x20, 0x70, 0x75, 0x62, 0x6c, 0x69,
+ 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x61,
+ 0x73, 0x20, 0x61, 0x6c, 0x6c, 0x20, 0x6f, 0x72,
+ 0x20, 0x70, 0x61, 0x72, 0x74, 0x20, 0x6f, 0x66,
+ 0x20, 0x61, 0x6e, 0x20, 0x49, 0x45, 0x54, 0x46,
+ 0x20, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65,
+ 0x74, 0x2d, 0x44, 0x72, 0x61, 0x66, 0x74, 0x20,
+ 0x6f, 0x72, 0x20, 0x52, 0x46, 0x43, 0x20, 0x61,
+ 0x6e, 0x64, 0x20, 0x61, 0x6e, 0x79, 0x20, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e, 0x74,
+ 0x20, 0x6d, 0x61, 0x64, 0x65, 0x20, 0x77, 0x69,
+ 0x74, 0x68, 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65,
+ 0x20, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74,
+ 0x20, 0x6f, 0x66, 0x20, 0x61, 0x6e, 0x20, 0x49,
+ 0x45, 0x54, 0x46, 0x20, 0x61, 0x63, 0x74, 0x69,
+ 0x76, 0x69, 0x74, 0x79, 0x20, 0x69, 0x73, 0x20,
+ 0x63, 0x6f, 0x6e, 0x73, 0x69, 0x64, 0x65, 0x72,
+ 0x65, 0x64, 0x20, 0x61, 0x6e, 0x20, 0x22, 0x49,
+ 0x45, 0x54, 0x46, 0x20, 0x43, 0x6f, 0x6e, 0x74,
+ 0x72, 0x69, 0x62, 0x75, 0x74, 0x69, 0x6f, 0x6e,
+ 0x22, 0x2e, 0x20, 0x53, 0x75, 0x63, 0x68, 0x20,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e,
+ 0x74, 0x73, 0x20, 0x69, 0x6e, 0x63, 0x6c, 0x75,
+ 0x64, 0x65, 0x20, 0x6f, 0x72, 0x61, 0x6c, 0x20,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x6d, 0x65, 0x6e,
+ 0x74, 0x73, 0x20, 0x69, 0x6e, 0x20, 0x49, 0x45,
+ 0x54, 0x46, 0x20, 0x73, 0x65, 0x73, 0x73, 0x69,
+ 0x6f, 0x6e, 0x73, 0x2c, 0x20, 0x61, 0x73, 0x20,
+ 0x77, 0x65, 0x6c, 0x6c, 0x20, 0x61, 0x73, 0x20,
+ 0x77, 0x72, 0x69, 0x74, 0x74, 0x65, 0x6e, 0x20,
+ 0x61, 0x6e, 0x64, 0x20, 0x65, 0x6c, 0x65, 0x63,
+ 0x74, 0x72, 0x6f, 0x6e, 0x69, 0x63, 0x20, 0x63,
+ 0x6f, 0x6d, 0x6d, 0x75, 0x6e, 0x69, 0x63, 0x61,
+ 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x20, 0x6d, 0x61,
+ 0x64, 0x65, 0x20, 0x61, 0x74, 0x20, 0x61, 0x6e,
+ 0x79, 0x20, 0x74, 0x69, 0x6d, 0x65, 0x20, 0x6f,
+ 0x72, 0x20, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x2c,
+ 0x20, 0x77, 0x68, 0x69, 0x63, 0x68, 0x20, 0x61,
+ 0x72, 0x65, 0x20, 0x61, 0x64, 0x64, 0x72, 0x65,
+ 0x73, 0x73, 0x65, 0x64, 0x20, 0x74, 0x6f]
+
+ rfctest04 = Poly1305 text4 rval4 sval4
+ == [0xf3, 0x47, 0x7e, 0x7c, 0xd9, 0x54, 0x17, 0xaf,
+ 0x89, 0xa6, 0xb8, 0x79, 0x4c, 0x31, 0x0c, 0xf0]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #4
+ rval5 = [0x1c, 0x92, 0x40, 0xa5, 0xeb, 0x55, 0xd3, 0x8a,
+ 0xf3, 0x33, 0x88, 0x86, 0x04, 0xf6, 0xb5, 0xf0]
+ sval5 = [0x47, 0x39, 0x17, 0xc1, 0x40, 0x2b, 0x80, 0x09,
+ 0x9d, 0xca, 0x5c, 0xbc, 0x20, 0x70, 0x75, 0xc0]
+ text5 = [0x27, 0x54, 0x77, 0x61, 0x73, 0x20, 0x62, 0x72,
+ 0x69, 0x6c, 0x6c, 0x69, 0x67, 0x2c, 0x20, 0x61,
+ 0x6e, 0x64, 0x20, 0x74, 0x68, 0x65, 0x20, 0x73,
+ 0x6c, 0x69, 0x74, 0x68, 0x79, 0x20, 0x74, 0x6f,
+ 0x76, 0x65, 0x73, 0x0a, 0x44, 0x69, 0x64, 0x20,
+ 0x67, 0x79, 0x72, 0x65, 0x20, 0x61, 0x6e, 0x64,
+ 0x20, 0x67, 0x69, 0x6d, 0x62, 0x6c, 0x65, 0x20,
+ 0x69, 0x6e, 0x20, 0x74, 0x68, 0x65, 0x20, 0x77,
+ 0x61, 0x62, 0x65, 0x3a, 0x0a, 0x41, 0x6c, 0x6c,
+ 0x20, 0x6d, 0x69, 0x6d, 0x73, 0x79, 0x20, 0x77,
+ 0x65, 0x72, 0x65, 0x20, 0x74, 0x68, 0x65, 0x20,
+ 0x62, 0x6f, 0x72, 0x6f, 0x67, 0x6f, 0x76, 0x65,
+ 0x73, 0x2c, 0x0a, 0x41, 0x6e, 0x64, 0x20, 0x74,
+ 0x68, 0x65, 0x20, 0x6d, 0x6f, 0x6d, 0x65, 0x20,
+ 0x72, 0x61, 0x74, 0x68, 0x73, 0x20, 0x6f, 0x75,
+ 0x74, 0x67, 0x72, 0x61, 0x62, 0x65, 0x2e]
+
+ rfctest05 = Poly1305 text5 rval5 sval5
+ == [0x45, 0x41, 0x66, 0x9a, 0x7e, 0xaa, 0xee, 0x61,
+ 0xe7, 0x08, 0xdc, 0x7c, 0xbc, 0xc5, 0xeb, 0x62]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #5
+ rval6 = [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval6 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text6 = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
+
+ rfctest06 = Poly1305 text6 rval6 sval6
+ == [0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #6
+ rval7 = [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval7 = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
+ text7 = [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ rfctest07 = Poly1305 text7 rval7 sval7
+ == [0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #7
+ rval8 = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval8 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text8 = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xf0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0x11, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ rfctest08 = Poly1305 text8 rval8 sval8
+ == [0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #8
+ rval9 = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval9 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text9 = [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xfb, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe,
+ 0xfe, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe, 0xfe,
+ 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
+ 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01]
+
+ rfctest09 = Poly1305 text9 rval9 sval9
+ == [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #9
+ rval10 = [0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval10 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text10 = [0xfd, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
+
+ rfctest10 = Poly1305 text10 rval10 sval10
+ == [0xfa, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
+ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #10
+ rval11 = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval11 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text11 = [0xe3, 0x35, 0x94, 0xd7, 0x50, 0x5e, 0x43, 0xb9,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x33, 0x94, 0xd7, 0x50, 0x5e, 0x43, 0x79, 0xcd,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ rfctest11 = Poly1305 text11 rval11 sval11
+ == [0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x55, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ // https://tools.ietf.org/html/rfc7539#appendix-A.3
+ // Test Vector #11
+ rval12 = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ sval12 = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+ text12 = [0xe3, 0x35, 0x94, 0xd7, 0x50, 0x5e, 0x43, 0xb9,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x33, 0x94, 0xd7, 0x50, 0x5e, 0x43, 0x79, 0xcd,
+ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+ rfctest12 = Poly1305 text12 rval12 sval12
+ == [0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
+
+property allTestsPass =
+ ([ // Full RFC test vectors
+ rfctest01, rfctest02, rfctest03, rfctest04,
+ rfctest05, rfctest06, rfctest07, rfctest08,
+ rfctest09, rfctest10, rfctest11, rfctest12
+ ] : [_]Bit) == ~zero // All test bits should equal one
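
The accumulator loop specified by Poly1305_block above — clamp r, append 0x01 to each 16-byte little-endian block, fold with acc = ((acc + block) * r) mod (2^130 - 5), then add s and keep the low 128 bits — can be sketched in Python like this (an illustrative reimplementation, checked against the rfctest01 vector above):

P = (1 << 130) - 5
CLAMP = 0x0ffffffc0ffffffc0ffffffc0fffffff

def poly1305(msg, r_bytes, s_bytes):
    r = int.from_bytes(r_bytes, "little") & CLAMP
    s = int.from_bytes(s_bytes, "little")
    acc = 0
    for i in range(0, len(msg), 16):
        block = msg[i:i + 16] + b"\x01"  # append the 0x01 marker byte
        acc = (acc + int.from_bytes(block, "little")) * r % P
    return ((acc + s) & ((1 << 128) - 1)).to_bytes(16, "little")

# RFC 7539 section 2.5.2 vector (rfctest01 above).
r = bytes.fromhex("85d6be7857556d337f4452fe42d506a8")
s = bytes.fromhex("0103808afb0db2fd4abff6af4149f51b")
assert poly1305(b"Cryptographic Forum Research Group", r, s) == \
    bytes.fromhex("a8061dc1305136c6c22b8baf0c0127a9")
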
diff --git a/security/nss/automation/saw/poly1305.saw b/security/nss/automation/saw/poly1305.saw
new file mode 100644
index 000000000..44be1e3e0
--- /dev/null
+++ b/security/nss/automation/saw/poly1305.saw
@@ -0,0 +1,47 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import "poly1305.cry" as poly1305;
+
+print "Proving Poly1305 spec...";
+prove_print abc {{ poly1305::allTestsPass }};
+
+print "Loading LLVM bitcode...";
+m <- llvm_load_module "../../../dist/Debug/lib/libfreeblpriv3.so.bc";
+
+let SpecPoly1305 n = do {
+ llvm_ptr "out" (llvm_array 16 (llvm_int 8));
+ out <- llvm_var "*out" (llvm_array 16 (llvm_int 8));
+
+ llvm_ptr "ad" (llvm_array 16 (llvm_int 8));
+ ad <- llvm_var "*ad" (llvm_array 16 (llvm_int 8));
+
+ adLen <- llvm_var "adLen" (llvm_int 32);
+
+ llvm_ptr "ciphertext" (llvm_array n (llvm_int 8));
+ ciphertext <- llvm_var "*ciphertext" (llvm_array n (llvm_int 8));
+
+ ciphertextLen <- llvm_var "ciphertextLen" (llvm_int 32);
+
+ llvm_ptr "key" (llvm_array 32 (llvm_int 8));
+ key <- llvm_var "*key" (llvm_array 32 (llvm_int 8));
+
+ llvm_assert_eq "*ad" {{ zero : [16][8] }};
+ llvm_assert_eq "adLen" {{ 16 : [32] }};
+
+ llvm_assert_eq "*ciphertext" {{ zero : [n][8] }};
+ llvm_assert_eq "ciphertextLen" {{ `n : [32] }};
+
+ llvm_assert_eq "*key" {{ zero : [32][8] }};
+
+ let res = {{ poly1305::Poly1305 (ad # ciphertext # [16, 0, 0, 0, 0, 0, 0, 0] # [`n, 0, 0, 0, 0, 0, 0, 0]) (take`{16} key) (drop`{16} key) }};
+ llvm_ensure_eq "*out" {{ res }};
+
+ llvm_verify_tactic abc;
+};
+
+print "Proving equality for a single block...";
+// This is currently disabled as it takes way too long. We need to help Z3
+// prove this before we can enable it on Taskcluster.
+//time (llvm_verify m "Poly1305Do" [] (SpecPoly1305 16));
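
The SpecPoly1305 binding above feeds the Cryptol spec a buffer of the form AAD || ciphertext || le64(adLen) || le64(ciphertextLen), with a 16-byte all-zero AAD and an n-byte all-zero ciphertext fixed by the llvm_assert_eq constraints. A small Python sketch of that layout (the mac_input helper name is illustrative only):

def mac_input(aad, ciphertext):
    # AAD, then ciphertext, then both lengths as 64-bit little-endian words.
    return (aad + ciphertext
            + len(aad).to_bytes(8, "little")
            + len(ciphertext).to_bytes(8, "little"))

buf = mac_input(bytes(16), bytes(16))            # the n = 16 case targeted above
assert len(buf) == 48
assert buf[32:40] == (16).to_bytes(8, "little")  # le64(adLen)
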
diff --git a/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh b/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
index 7b7d534e6..3076667a6 100644
--- a/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
+++ b/security/nss/automation/taskcluster/docker-clang-3.9/setup.sh
@@ -25,8 +25,8 @@ apt-get -y update
apt-get install -y --no-install-recommends ${apt_packages[@]}
# Download clang.
-curl -LO http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
-curl -LO http://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
# Verify the signature.
gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
gpg --verify *.tar.xz.sig
diff --git a/security/nss/automation/taskcluster/docker-decision/Dockerfile b/security/nss/automation/taskcluster/docker-decision/Dockerfile
index 35777c0b7..473ce64ba 100644
--- a/security/nss/automation/taskcluster/docker-decision/Dockerfile
+++ b/security/nss/automation/taskcluster/docker-decision/Dockerfile
@@ -12,6 +12,9 @@ RUN chmod +x /home/worker/bin/*
ADD setup.sh /tmp/setup.sh
RUN bash /tmp/setup.sh
+# Change user.
+USER worker
+
# Env variables.
ENV HOME /home/worker
ENV SHELL /bin/bash
diff --git a/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh b/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
index 9167f6bda..0cdd2ac40 100644
--- a/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
+++ b/security/nss/automation/taskcluster/docker-decision/bin/checkout.sh
@@ -2,11 +2,6 @@
set -v -e -x
-if [ $(id -u) = 0 ]; then
- # Drop privileges by re-running this script.
- exec su worker $0
-fi
-
# Default values for testing.
REVISION=${NSS_HEAD_REVISION:-default}
REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile b/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile
new file mode 100644
index 000000000..3330c007f
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/Dockerfile
@@ -0,0 +1,30 @@
+FROM ubuntu:14.04
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+RUN useradd -d /home/worker -s /bin/bash -m worker
+WORKDIR /home/worker
+
+# Add build and test scripts.
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Install dependencies.
+ADD setup.sh /tmp/setup.sh
+RUN bash /tmp/setup.sh
+
+# Change user.
+USER worker
+
+# Env variables.
+ENV HOME /home/worker
+ENV SHELL /bin/bash
+ENV USER worker
+ENV LOGNAME worker
+ENV HOSTNAME taskcluster-worker
+ENV LANG en_US.UTF-8
+ENV LC_ALL en_US.UTF-8
+ENV HOST localhost
+ENV DOMSUF localdomain
+
+# Set a default command for debugging.
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh b/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh
new file mode 100644
index 000000000..9167f6bda
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/bin/checkout.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+if [ $(id -u) = 0 ]; then
+ # Drop privileges by re-running this script.
+ exec su worker $0
+fi
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh b/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh
new file mode 100644
index 000000000..f6325d966
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-gcc-4.4/setup.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Update packages.
+export DEBIAN_FRONTEND=noninteractive
+apt-get -y update && apt-get -y upgrade
+
+apt_packages=()
+apt_packages+=('ca-certificates')
+apt_packages+=('g++-4.4')
+apt_packages+=('gcc-4.4')
+apt_packages+=('locales')
+apt_packages+=('make')
+apt_packages+=('mercurial')
+apt_packages+=('zlib1g-dev')
+
+# Install packages.
+apt-get -y update
+apt-get install -y --no-install-recommends ${apt_packages[@]}
+
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
+apt-get autoremove -y
+apt-get clean
+apt-get autoclean
+rm $0
diff --git a/security/nss/automation/taskcluster/docker-hacl/Dockerfile b/security/nss/automation/taskcluster/docker-hacl/Dockerfile
new file mode 100644
index 000000000..63f9a24e2
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/Dockerfile
@@ -0,0 +1,30 @@
+FROM ubuntu:xenial
+
+MAINTAINER Franziskus Kiefer <franziskuskiefer@gmail.com>
+# Based on the HACL* image from Benjamin Beurdouche and
+# the original F* formula with Daniel Fabian
+
+# Pinned versions of HACL* (F* and KreMLin are pinned as submodules)
+ENV haclrepo https://github.com/franziskuskiefer/hacl-star.git
+
+# Define versions of dependencies
+ENV opamv 4.04.2
+ENV haclversion 668d6cf274c33bbe2e951e3a84b73f2b6442a51f
+
+# Install required packages and set versions
+ADD setup.sh /tmp/setup.sh
+RUN bash /tmp/setup.sh
+
+# Create user, add scripts.
+RUN useradd -ms /bin/bash worker
+WORKDIR /home/worker
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+USER worker
+
+# Build F*, HACL*, verify. Install a few more dependencies.
+ENV OPAMYES true
+ENV PATH "/home/worker/hacl-star/dependencies/z3/bin:$PATH"
+ADD setup-user.sh /tmp/setup-user.sh
+ADD license.txt /tmp/license.txt
+RUN bash /tmp/setup-user.sh
diff --git a/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh b/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh
new file mode 100644
index 000000000..9167f6bda
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/bin/checkout.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+if [ $(id -u) = 0 ]; then
+ # Drop privileges by re-running this script.
+ exec su worker $0
+fi
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker-hacl/license.txt b/security/nss/automation/taskcluster/docker-hacl/license.txt
new file mode 100644
index 000000000..03d25c4d3
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/license.txt
@@ -0,0 +1,15 @@
+/* Copyright 2016-2017 INRIA and Microsoft Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
diff --git a/security/nss/automation/taskcluster/docker-hacl/setup-user.sh b/security/nss/automation/taskcluster/docker-hacl/setup-user.sh
new file mode 100644
index 000000000..b8accaf58
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/setup-user.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Prepare build (OCaml packages)
+opam init
+echo ". /home/worker/.opam/opam-init/init.sh > /dev/null 2> /dev/null || true" >> .bashrc
+opam switch -v ${opamv}
+opam install ocamlfind batteries sqlite3 fileutils yojson ppx_deriving_yojson zarith pprint menhir ulex process fix wasm stdint
+
+# Get the HACL* code
+git clone ${haclrepo} hacl-star
+git -C hacl-star checkout ${haclversion}
+
+# Prepare submodules, then build, verify, test, and extract the C code.
+# This caches the extracted C code (pins the HACL* version). All we need to do
+# on CI now is compare the code in this docker image with the one in NSS.
+opam config exec -- make -C hacl-star prepare -j$(nproc)
+make -C hacl-star verify-nss -j$(nproc)
+make -C hacl-star -f Makefile.build snapshots/nss -j$(nproc)
+KOPTS="-funroll-loops 5" make -C hacl-star/code/curve25519 test -j$(nproc)
+make -C hacl-star/code/salsa-family test -j$(nproc)
+make -C hacl-star/code/poly1305 test -j$(nproc)
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
diff --git a/security/nss/automation/taskcluster/docker-hacl/setup.sh b/security/nss/automation/taskcluster/docker-hacl/setup.sh
new file mode 100644
index 000000000..f5f8bd7d5
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-hacl/setup.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Update packages.
+export DEBIAN_FRONTEND=noninteractive
+apt-get -qq update
+apt-get install --yes libssl-dev libsqlite3-dev g++-5 gcc-5 m4 make opam pkg-config python libgmp3-dev cmake curl libtool-bin autoconf wget locales
+update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 200
+update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 200
+
+# Get clang-format-3.9
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz
+curl -LO https://releases.llvm.org/3.9.1/clang+llvm-3.9.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
+# Verify the signature.
+gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
+gpg --verify *.tar.xz.sig
+# Install into /usr/local/.
+tar xJvf *.tar.xz -C /usr/local --strip-components=1
+# Cleanup.
+rm *.tar.xz*
+
+locale-gen en_US.UTF-8
+dpkg-reconfigure locales
+
+# Cleanup.
+rm -rf ~/.ccache ~/.cache
+apt-get autoremove -y
+apt-get clean
+apt-get autoclean
diff --git a/security/nss/automation/taskcluster/docker-saw/Dockerfile b/security/nss/automation/taskcluster/docker-saw/Dockerfile
new file mode 100644
index 000000000..a481ba048
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/Dockerfile
@@ -0,0 +1,46 @@
+FROM ubuntu:latest
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+RUN useradd -d /home/worker -s /bin/bash -m worker
+WORKDIR /home/worker
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y \
+ binutils \
+ build-essential \
+ bzip2 \
+ clang-3.8 \
+ curl \
+ gcc-multilib \
+ g++-multilib \
+ gyp \
+ lib32z1-dev \
+ mercurial \
+ ninja-build \
+ unzip \
+ zlib1g-dev
+
+# Add missing LLVM plugin for gold linker.
+ADD LLVMgold.so.zip /usr/lib/llvm-3.8/lib/LLVMgold.so.zip
+RUN unzip /usr/lib/llvm-3.8/lib/LLVMgold.so.zip -d /usr/lib/llvm-3.8/lib/
+
+# Install SAW/Cryptol.
+RUN curl -LO https://saw.galois.com/builds/nightly/saw-0.2-2018-01-14-Ubuntu14.04-64.tar.gz && \
+ tar xzvf saw-*.tar.gz -C /usr/local --strip-components=1 && \
+ rm saw-*.tar.gz
+
+# Install Z3.
+RUN curl -LO https://github.com/Z3Prover/z3/releases/download/z3-4.6.0/z3-4.6.0-x64-ubuntu-16.04.zip && \
+ unzip z3*.zip && \
+ cp -r z3*/* /usr/local/ && \
+ rm -fr z3*
+
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Change user.
+USER worker
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip b/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip
new file mode 100644
index 000000000..b5e5a593d
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/LLVMgold.so.zip
Binary files differ
diff --git a/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh b/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh
new file mode 100644
index 000000000..0cdd2ac40
--- /dev/null
+++ b/security/nss/automation/taskcluster/docker-saw/bin/checkout.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/docker/setup.sh b/security/nss/automation/taskcluster/docker/setup.sh
index 3ba4e854e..7b90b2e69 100644
--- a/security/nss/automation/taskcluster/docker/setup.sh
+++ b/security/nss/automation/taskcluster/docker/setup.sh
@@ -12,6 +12,7 @@ apt-get install -y --no-install-recommends apt-utils
apt_packages=()
apt_packages+=('build-essential')
apt_packages+=('ca-certificates')
+apt_packages+=('clang-5.0')
apt_packages+=('curl')
apt_packages+=('npm')
apt_packages+=('git')
@@ -47,16 +48,17 @@ echo "deb http://ppa.launchpad.net/ubuntu-toolchain-r/test/ubuntu xenial main" >
apt-get -y update
apt-get install -y --no-install-recommends ${apt_packages[@]}
-# Download clang.
-curl -LO http://releases.llvm.org/4.0.0/clang+llvm-4.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz
-curl -LO http://releases.llvm.org/4.0.0/clang+llvm-4.0.0-x86_64-linux-gnu-ubuntu-16.04.tar.xz.sig
-# Verify the signature.
-gpg --keyserver pool.sks-keyservers.net --recv-keys B6C8F98282B944E3B0D5C2530FC3042E345AD05D
-gpg --verify *.tar.xz.sig
-# Install into /usr/local/.
-tar xJvf *.tar.xz -C /usr/local --strip-components=1
-# Cleanup.
-rm *.tar.xz*
+# Latest version of abigail-tools
+apt-get install -y libxml2-dev autoconf libelf-dev libdw-dev libtool
+git clone git://sourceware.org/git/libabigail.git
+cd ./libabigail
+autoreconf -fi
+./configure --prefix=/usr --disable-static --disable-apidoc --disable-manual
+make
+make install
+cd ..
+apt-get remove -y libxml2-dev autoconf libtool
+rm -rf libabigail
# Install latest Rust (stable).
su worker -c "curl https://sh.rustup.rs -sSf | sh -s -- -y"
diff --git a/security/nss/automation/taskcluster/graph/src/context_hash.js b/security/nss/automation/taskcluster/graph/src/context_hash.js
index f0a2e9a88..0699a0590 100644
--- a/security/nss/automation/taskcluster/graph/src/context_hash.js
+++ b/security/nss/automation/taskcluster/graph/src/context_hash.js
@@ -27,14 +27,24 @@ function collectFilesInDirectory(dir) {
});
}
-// Compute a context hash for the given context path.
-export default function (context_path) {
+// A list of hashes for each file in the given path.
+function collectFileHashes(context_path) {
let root = path.join(__dirname, "../../../..");
let dir = path.join(root, context_path);
let files = collectFilesInDirectory(dir).sort();
- let hashes = files.map(file => {
+
+ return files.map(file => {
return sha256(file + "|" + fs.readFileSync(file, "utf-8"));
});
+}
+
+// Compute a context hash for the given context path.
+export default function (context_path) {
+ // Regenerate all images when the image_builder changes.
+ let hashes = collectFileHashes("automation/taskcluster/image_builder");
+
+ // Regenerate images when the image itself changes.
+ hashes = hashes.concat(collectFileHashes(context_path));
// Generate a new prefix every month to ensure the image stays buildable.
let now = new Date();
diff --git a/security/nss/automation/taskcluster/graph/src/extend.js b/security/nss/automation/taskcluster/graph/src/extend.js
index d541a1a3b..ee9ac9b74 100644
--- a/security/nss/automation/taskcluster/graph/src/extend.js
+++ b/security/nss/automation/taskcluster/graph/src/extend.js
@@ -15,15 +15,34 @@ const LINUX_CLANG39_IMAGE = {
path: "automation/taskcluster/docker-clang-3.9"
};
+const LINUX_GCC44_IMAGE = {
+ name: "linux-gcc-4.4",
+ path: "automation/taskcluster/docker-gcc-4.4"
+};
+
const FUZZ_IMAGE = {
name: "fuzz",
path: "automation/taskcluster/docker-fuzz"
};
+const HACL_GEN_IMAGE = {
+ name: "hacl",
+ path: "automation/taskcluster/docker-hacl"
+};
+
+const SAW_IMAGE = {
+ name: "saw",
+ path: "automation/taskcluster/docker-saw"
+};
+
const WINDOWS_CHECKOUT_CMD =
"bash -c \"hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " +
"(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " +
"(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)\"";
+const MAC_CHECKOUT_CMD = ["bash", "-c",
+ "hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " +
+ "(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " +
+ "(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)"];
/*****************************************************************************/
@@ -51,6 +70,16 @@ queue.filter(task => {
if (task.platform == "aarch64") {
return false;
}
+
+ // No mac
+ if (task.platform == "mac") {
+ return false;
+ }
+ }
+
+ if (task.tests == "fips" &&
+ (task.platform == "mac" || task.platform == "aarch64")) {
+ return false;
}
// Only old make builds have -Ddisable_libpkix=0 and can run chain tests.
@@ -59,13 +88,13 @@ queue.filter(task => {
}
if (task.group == "Test") {
- // Don't run test builds on old make platforms
- if (task.collection == "make") {
+    // Don't run test builds on old make platforms, or for the fips gyp collection.
+ if (task.collection == "make" || task.collection == "fips") {
return false;
}
}
- // Don't run additional hardware tests on ARM (we don't have anything there).
+ // Don't run all additional hardware tests on ARM.
if (task.group == "Cipher" && task.platform == "aarch64" && task.env &&
(task.env.NSS_DISABLE_PCLMUL == "1" || task.env.NSS_DISABLE_HW_AES == "1"
|| task.env.NSS_DISABLE_AVX == "1")) {
@@ -78,11 +107,19 @@ queue.filter(task => {
queue.map(task => {
if (task.collection == "asan") {
// CRMF and FIPS tests still leak, unfortunately.
- if (task.tests == "crmf" || task.tests == "fips") {
+ if (task.tests == "crmf") {
task.env.ASAN_OPTIONS = "detect_leaks=0";
}
}
+ // We don't run FIPS SSL tests
+ if (task.tests == "ssl") {
+ if (!task.env) {
+ task.env = {};
+ }
+ task.env.NSS_SSL_TESTS = "crl iopr policy";
+ }
+
// Windows is slow.
if (task.platform == "windows2012-64" && task.tests == "chains") {
task.maxRunTime = 7200;
@@ -128,6 +165,18 @@ export default async function main() {
],
});
+ await scheduleLinux("Linux 64 (opt, make)", {
+ env: {USE_64: "1", BUILD_OPT: "1"},
+ platform: "linux64",
+ image: LINUX_IMAGE,
+ collection: "make",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh"
+ ],
+ });
+
await scheduleLinux("Linux 32 (debug, make)", {
platform: "linux32",
image: LINUX_IMAGE,
@@ -144,8 +193,8 @@ export default async function main() {
UBSAN_OPTIONS: "print_stacktrace=1",
NSS_DISABLE_ARENA_FREE_LIST: "1",
NSS_DISABLE_UNLOAD: "1",
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
platform: "linux64",
collection: "asan",
@@ -153,6 +202,12 @@ export default async function main() {
features: ["allowPtrace"],
}, "--ubsan --asan");
+ await scheduleLinux("Linux 64 (FIPS opt)", {
+ platform: "linux64",
+ collection: "fips",
+ image: LINUX_IMAGE,
+ }, "--enable-fips --opt");
+
await scheduleWindows("Windows 2012 64 (debug, make)", {
platform: "windows2012-64",
collection: "make",
@@ -216,6 +271,82 @@ export default async function main() {
collection: "opt",
}, aarch64_base)
);
+
+ await scheduleLinux("Linux AArch64 (debug, make)",
+ merge({
+ env: {USE_64: "1"},
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh"
+ ],
+ collection: "make",
+ }, aarch64_base)
+ );
+
+ await scheduleMac("Mac (opt)", {collection: "opt"}, "--opt");
+ await scheduleMac("Mac (debug)", {collection: "debug"});
+}
+
+
+async function scheduleMac(name, base, args = "") {
+ let mac_base = merge(base, {
+ env: {
+ PATH: "/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin",
+ NSS_TASKCLUSTER_MAC: "1",
+ DOMSUF: "localdomain",
+ HOST: "localhost",
+ },
+ provisioner: "localprovisioner",
+ workerType: "nss-macos-10-12",
+ platform: "mac"
+ });
+
+ // Build base definition.
+ let build_base = merge({
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/build_gyp.sh", args]
+ ],
+ provisioner: "localprovisioner",
+ workerType: "nss-macos-10-12",
+ platform: "mac",
+ maxRunTime: 7200,
+ artifacts: [{
+ expires: 24 * 7,
+ type: "directory",
+ path: "public"
+ }],
+ kind: "build",
+ symbol: "B"
+ }, mac_base);
+
+ // The task that builds NSPR+NSS.
+ let task_build = queue.scheduleTask(merge(build_base, {name}));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/gen_certs.sh"]
+ ],
+ parent: task_build,
+ symbol: "Certs"
+ }));
+
+ // Schedule tests.
+ scheduleTests(task_build, task_cert, merge(mac_base, {
+ command: [
+ MAC_CHECKOUT_CMD,
+ ["bash", "-c",
+ "nss/automation/taskcluster/scripts/run_tests.sh"]
+ ]
+ }));
+
+ return queue.submit();
}
/*****************************************************************************/
@@ -242,6 +373,45 @@ async function scheduleLinux(name, base, args = "") {
// The task that builds NSPR+NSS.
let task_build = queue.scheduleTask(merge(build_base, {name}));
+ // Make builds run FIPS tests, which need an extra FIPS build.
+ if (base.collection == "make") {
+ let extra_build = queue.scheduleTask(merge(build_base, {
+ env: { NSS_FORCE_FIPS: "1" },
+ group: "FIPS",
+ name: `${name} w/ NSS_FORCE_FIPS`
+ }));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_certs.sh"
+ ],
+ parent: extra_build,
+ symbol: "Certs-F",
+ group: "FIPS",
+ }));
+
+ // Schedule FIPS tests.
+ queue.scheduleTask(merge(base, {
+ parent: task_cert,
+ name: "FIPS",
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
+ ],
+ cycle: "standard",
+ kind: "test",
+ name: "FIPS tests",
+ symbol: "Tests-F",
+ tests: "fips",
+ group: "FIPS"
+ }));
+ }
+
// The task that generates certificates.
let task_cert = queue.scheduleTask(merge(build_base, {
name: "Certificates",
@@ -266,12 +436,32 @@ async function scheduleLinux(name, base, args = "") {
// Extra builds.
let extra_base = merge({group: "Builds"}, build_base);
queue.scheduleTask(merge(extra_base, {
- name: `${name} w/ clang-4.0`,
+ name: `${name} w/ clang-5.0`,
env: {
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
- symbol: "clang-4.0"
+ symbol: "clang-5.0"
+ }));
+
+ queue.scheduleTask(merge(extra_base, {
+ name: `${name} w/ gcc-4.4`,
+ image: LINUX_GCC44_IMAGE,
+ env: {
+ USE_64: "1",
+ CC: "gcc-4.4",
+ CCC: "g++-4.4",
+ // gcc-4.6 introduced nullptr.
+ NSS_DISABLE_GTESTS: "1",
+ },
+    // Use the old Makefile-based build system; GYP doesn't have a proper GCC
+    // version check for __int128 support. It's mainly meant to cover RHEL6.
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh",
+ ],
+ symbol: "gcc-4.4"
}));
queue.scheduleTask(merge(extra_base, {
@@ -403,12 +593,13 @@ async function scheduleFuzzing() {
// Schedule MPI fuzzing runs.
let mpi_base = merge(run_base, {group: "MPI"});
- let mpi_names = ["add", "addmod", "div", "expmod", "mod", "mulmod", "sqr",
+ let mpi_names = ["add", "addmod", "div", "mod", "mulmod", "sqr",
"sqrmod", "sub", "submod"];
for (let name of mpi_names) {
scheduleFuzzingRun(mpi_base, `MPI (${name})`, `mpi-${name}`, 4096, name);
}
scheduleFuzzingRun(mpi_base, `MPI (invmod)`, `mpi-invmod`, 256, "invmod");
+ scheduleFuzzingRun(mpi_base, `MPI (expmod)`, `mpi-expmod`, 2048, "expmod");
// Schedule TLS fuzzing runs (non-fuzzing mode).
let tls_base = merge(run_base, {group: "TLS"});
@@ -625,6 +816,43 @@ async function scheduleWindows(name, base, build_script) {
symbol: "B"
});
+ // Make builds run FIPS tests, which need an extra FIPS build.
+ if (base.collection == "make") {
+ let extra_build = queue.scheduleTask(merge(build_base, {
+ env: { NSS_FORCE_FIPS: "1" },
+ group: "FIPS",
+ name: `${name} w/ NSS_FORCE_FIPS`
+ }));
+
+ // The task that generates certificates.
+ let task_cert = queue.scheduleTask(merge(build_base, {
+ name: "Certificates",
+ command: [
+ WINDOWS_CHECKOUT_CMD,
+ "bash -c nss/automation/taskcluster/windows/gen_certs.sh"
+ ],
+ parent: extra_build,
+ symbol: "Certs-F",
+ group: "FIPS",
+ }));
+
+ // Schedule FIPS tests.
+ queue.scheduleTask(merge(base, {
+ parent: task_cert,
+ name: "FIPS",
+ command: [
+ WINDOWS_CHECKOUT_CMD,
+ "bash -c nss/automation/taskcluster/windows/run_tests.sh"
+ ],
+ cycle: "standard",
+ kind: "test",
+ name: "FIPS tests",
+ symbol: "Tests-F",
+ tests: "fips",
+ group: "FIPS"
+ }));
+ }
+
// The task that builds NSPR+NSS.
let task_build = queue.scheduleTask(merge(build_base, {name}));
@@ -685,6 +913,13 @@ function scheduleTests(task_build, task_cert, test_base) {
env: {NSS_DISABLE_AVX: "1"}, group: "Cipher"
}));
queue.scheduleTask(merge(no_cert_base, {
+ name: "Cipher tests", symbol: "NoSSSE3|NEON", tests: "cipher",
+ env: {
+ NSS_DISABLE_ARM_NEON: "1",
+ NSS_DISABLE_SSSE3: "1"
+ }, group: "Cipher"
+ }));
+ queue.scheduleTask(merge(no_cert_base, {
name: "EC tests", symbol: "EC", tests: "ec"
}));
queue.scheduleTask(merge(no_cert_base, {
@@ -703,9 +938,6 @@ function scheduleTests(task_build, task_cert, test_base) {
name: "DB tests", symbol: "DB", tests: "dbtests"
}));
queue.scheduleTask(merge(cert_base, {
- name: "FIPS tests", symbol: "FIPS", tests: "fips"
- }));
- queue.scheduleTask(merge(cert_base, {
name: "Merge tests", symbol: "Merge", tests: "merge"
}));
queue.scheduleTask(merge(cert_base, {
@@ -739,6 +971,18 @@ async function scheduleTools() {
kind: "test"
};
+ //ABI check task
+ queue.scheduleTask(merge(base, {
+ symbol: "abi",
+ name: "abi",
+ image: LINUX_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/check_abi.sh"
+ ],
+ }));
+
queue.scheduleTask(merge(base, {
symbol: "clang-format-3.9",
name: "clang-format-3.9",
@@ -751,13 +995,13 @@ async function scheduleTools() {
}));
queue.scheduleTask(merge(base, {
- symbol: "scan-build-4.0",
- name: "scan-build-4.0",
+ symbol: "scan-build-5.0",
+ name: "scan-build-5.0",
image: LINUX_IMAGE,
env: {
USE_64: "1",
- CC: "clang",
- CCC: "clang++",
+ CC: "clang-5.0",
+ CCC: "clang++-5.0",
},
artifacts: {
public: {
@@ -773,5 +1017,80 @@ async function scheduleTools() {
]
}));
+ queue.scheduleTask(merge(base, {
+ symbol: "hacl",
+ name: "hacl",
+ image: HACL_GEN_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_hacl.sh"
+ ]
+ }));
+
+ let task_saw = queue.scheduleTask(merge(base, {
+ symbol: "B",
+ group: "SAW",
+ name: "LLVM bitcode build (32 bit)",
+ image: SAW_IMAGE,
+ kind: "build",
+ env: {
+ AR: "llvm-ar-3.8",
+ CC: "clang-3.8",
+ CCC: "clang++-3.8"
+ },
+ artifacts: {
+ public: {
+ expires: 24 * 7,
+ type: "directory",
+ path: "/home/worker/artifacts"
+ }
+ },
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh --disable-tests --emit-llvm -m32"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "bmul",
+ group: "SAW",
+ name: "bmul.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh bmul"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "ChaCha20",
+ group: "SAW",
+ name: "chacha20.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh chacha20"
+ ]
+ }));
+
+ queue.scheduleTask(merge(base, {
+ parent: task_saw,
+ symbol: "Poly1305",
+ group: "SAW",
+ name: "poly1305.saw",
+ image: SAW_IMAGE,
+ command: [
+ "/bin/bash",
+ "-c",
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/run_saw.sh poly1305"
+ ]
+ }));
+
return queue.submit();
}
diff --git a/security/nss/automation/taskcluster/graph/src/image_builder.js b/security/nss/automation/taskcluster/graph/src/image_builder.js
index bc90e0242..d9d7755dc 100644
--- a/security/nss/automation/taskcluster/graph/src/image_builder.js
+++ b/security/nss/automation/taskcluster/graph/src/image_builder.js
@@ -30,14 +30,12 @@ export async function buildTask({name, path}) {
let ns = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;
return {
- name: "Image Builder",
- image: "taskcluster/image_builder:0.1.5",
+ name: `Image Builder (${name})`,
+ image: "nssdev/image_builder:0.1.5",
routes: ["index." + ns],
env: {
- HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
- BASE_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
- HEAD_REV: process.env.NSS_HEAD_REVISION,
- HEAD_REF: process.env.NSS_HEAD_REVISION,
+ NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
+ NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION,
PROJECT: process.env.TC_PROJECT,
CONTEXT_PATH: path,
HASH: hash
@@ -52,10 +50,11 @@ export async function buildTask({name, path}) {
command: [
"/bin/bash",
"-c",
- "/home/worker/bin/build_image.sh"
+ "bin/checkout.sh && nss/automation/taskcluster/scripts/build_image.sh"
],
platform: "nss-decision",
features: ["dind"],
+ maxRunTime: 7200,
kind: "build",
symbol: "I"
};
diff --git a/security/nss/automation/taskcluster/graph/src/queue.js b/security/nss/automation/taskcluster/graph/src/queue.js
index 29b570729..809a17bf1 100644
--- a/security/nss/automation/taskcluster/graph/src/queue.js
+++ b/security/nss/automation/taskcluster/graph/src/queue.js
@@ -31,10 +31,11 @@ function parseRoutes(routes) {
];
// Notify about failures (except on try).
- if (process.env.TC_PROJECT != "nss-try") {
+ // Turned off, too noisy.
+ /*if (process.env.TC_PROJECT != "nss-try") {
rv.push(`notify.email.${process.env.TC_OWNER}.on-failed`,
`notify.email.${process.env.TC_OWNER}.on-exception`);
- }
+ }*/
return rv;
}
diff --git a/security/nss/automation/taskcluster/graph/src/try_syntax.js b/security/nss/automation/taskcluster/graph/src/try_syntax.js
index 7748e068a..1c06dde13 100644
--- a/security/nss/automation/taskcluster/graph/src/try_syntax.js
+++ b/security/nss/automation/taskcluster/graph/src/try_syntax.js
@@ -22,10 +22,10 @@ function parseOptions(opts) {
}
// Parse platforms.
- let allPlatforms = ["linux", "linux64", "linux64-asan",
+ let allPlatforms = ["linux", "linux64", "linux64-asan", "linux64-fips",
"win", "win64", "win-make", "win64-make",
"linux64-make", "linux-make", "linux-fuzz",
- "linux64-fuzz", "aarch64"];
+ "linux64-fuzz", "aarch64", "mac"];
let platforms = intersect(opts.platform.split(/\s*,\s*/), allPlatforms);
// If the given value is nonsense or "none" default to all platforms.
@@ -51,7 +51,7 @@ function parseOptions(opts) {
}
// Parse tools.
- let allTools = ["clang-format", "scan-build"];
+ let allTools = ["clang-format", "scan-build", "hacl", "saw", "abi"];
let tools = intersect(opts.tools.split(/\s*,\s*/), allTools);
// If the given value is "all" run all tools.
@@ -77,7 +77,8 @@ function filter(opts) {
// are not affected by platform or build type selectors.
if (task.platform == "nss-tools") {
return opts.tools.some(tool => {
- return task.symbol.toLowerCase().startsWith(tool);
+ return task.symbol.toLowerCase().startsWith(tool) ||
+ (task.group && task.group.toLowerCase().startsWith(tool));
});
}
@@ -111,6 +112,7 @@ function filter(opts) {
"linux": "linux32",
"linux-fuzz": "linux32",
"linux64-asan": "linux64",
+ "linux64-fips": "linux64",
"linux64-fuzz": "linux64",
"linux64-make": "linux64",
"linux-make": "linux32",
@@ -126,6 +128,8 @@ function filter(opts) {
// Additional checks.
if (platform == "linux64-asan") {
keep &= coll("asan");
+ } else if (platform == "linux64-fips") {
+ keep &= coll("fips");
} else if (platform == "linux64-make" || platform == "linux-make" ||
platform == "win64-make" || platform == "win-make") {
keep &= coll("make");
diff --git a/security/nss/automation/taskcluster/image_builder/Dockerfile b/security/nss/automation/taskcluster/image_builder/Dockerfile
new file mode 100644
index 000000000..f8b4edcc5
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/Dockerfile
@@ -0,0 +1,23 @@
+FROM ubuntu:16.04
+MAINTAINER Tim Taubert <ttaubert@mozilla.com>
+
+WORKDIR /home/worker
+
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN apt-get update && apt-get install -y apt-transport-https apt-utils
+RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 36A1D7869245C8950F966E92D8576A8BA88D21E9 && \
+ sh -c "echo deb https://get.docker.io/ubuntu docker main \
+ > /etc/apt/sources.list.d/docker.list"
+RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 41BD8711B1F0EC2B0D85B91CF59CE3A8323293EE && \
+ sh -c "echo deb http://ppa.launchpad.net/mercurial-ppa/releases/ubuntu xenial main \
+ > /etc/apt/sources.list.d/mercurial.list"
+RUN apt-get update && apt-get install -y \
+ lxc-docker-1.6.1 \
+ mercurial
+
+ADD bin /home/worker/bin
+RUN chmod +x /home/worker/bin/*
+
+# Set a default command useful for debugging
+CMD ["/bin/bash", "--login"]
diff --git a/security/nss/automation/taskcluster/image_builder/VERSION b/security/nss/automation/taskcluster/image_builder/VERSION
new file mode 100644
index 000000000..9faa1b7a7
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/VERSION
@@ -0,0 +1 @@
+0.1.5
diff --git a/security/nss/automation/taskcluster/image_builder/bin/checkout.sh b/security/nss/automation/taskcluster/image_builder/bin/checkout.sh
new file mode 100644
index 000000000..0cdd2ac40
--- /dev/null
+++ b/security/nss/automation/taskcluster/image_builder/bin/checkout.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+set -v -e -x
+
+# Default values for testing.
+REVISION=${NSS_HEAD_REVISION:-default}
+REPOSITORY=${NSS_HEAD_REPOSITORY:-https://hg.mozilla.org/projects/nss}
+
+# Clone NSS.
+for i in 0 2 5; do
+ sleep $i
+ hg clone -r $REVISION $REPOSITORY nss && exit 0
+ rm -rf nss
+done
+exit 1
diff --git a/security/nss/automation/taskcluster/scripts/build_gyp.sh b/security/nss/automation/taskcluster/scripts/build_gyp.sh
index 7190bd5c4..fb3a33a52 100755
--- a/security/nss/automation/taskcluster/scripts/build_gyp.sh
+++ b/security/nss/automation/taskcluster/scripts/build_gyp.sh
@@ -9,5 +9,10 @@ hg_clone https://hg.mozilla.org/projects/nspr ./nspr default
nss/build.sh -g -v "$@"
# Package.
-mkdir artifacts
-tar cvfjh artifacts/dist.tar.bz2 dist
+if [[ $(uname) = "Darwin" ]]; then
+ mkdir -p public
+ tar cvfjh public/dist.tar.bz2 dist
+else
+ mkdir artifacts
+ tar cvfjh artifacts/dist.tar.bz2 dist
+fi
diff --git a/security/nss/automation/taskcluster/scripts/build_image.sh b/security/nss/automation/taskcluster/scripts/build_image.sh
new file mode 100644
index 000000000..b422214e7
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/build_image.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -vex
+
+set -x -e -v
+
+# Prefix errors with taskcluster error prefix so that they are parsed by Treeherder
+raise_error() {
+ echo
+ echo "[taskcluster-image-build:error] $1"
+ exit 1
+}
+
+# Ensure that the PROJECT is specified so the image can be indexed
+test -n "$PROJECT" || raise_error "Project must be provided."
+test -n "$HASH" || raise_error "Context Hash must be provided."
+
+CONTEXT_PATH=/home/worker/nss/$CONTEXT_PATH
+
+test -d $CONTEXT_PATH || raise_error "Context Path $CONTEXT_PATH does not exist."
+test -f "$CONTEXT_PATH/Dockerfile" || raise_error "Dockerfile must be present in $CONTEXT_PATH."
+
+docker build -t $PROJECT:$HASH $CONTEXT_PATH
+
+mkdir /artifacts
+docker save $PROJECT:$HASH > /artifacts/image.tar
diff --git a/security/nss/automation/taskcluster/scripts/check_abi.sh b/security/nss/automation/taskcluster/scripts/check_abi.sh
new file mode 100644
index 000000000..dbc1a476f
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/check_abi.sh
@@ -0,0 +1,172 @@
+#! /bin/bash
+
+set_env()
+{
+ cd /home/worker
+ HGDIR=/home/worker
+    OUTPUTDIR="$(pwd)/output"
+ DATE=$(date "+TB [%Y-%m-%d %H:%M:%S]")
+
+ if [ ! -d "${OUTPUTDIR}" ]; then
+ echo "Creating output dir"
+ mkdir "${OUTPUTDIR}"
+ fi
+
+ if [ ! -d "nspr" ]; then
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -r "default" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/nspr" && break
+ rm -rf nspr
+ done
+ fi
+
+ cd nss
+ ./build.sh -v -c
+ cd ..
+}
+
+check_abi()
+{
+ set_env
+ set +e #reverses set -e from build.sh to allow possible hg clone failures
+ if [[ "$1" != --nobuild ]]; then # Start nobuild block
+
+ echo "######## NSS ABI CHECK ########"
+ echo "######## creating temporary HG clones ########"
+
+ rm -rf ${HGDIR}/baseline
+ mkdir ${HGDIR}/baseline
+ BASE_NSS=`cat ${HGDIR}/nss/automation/abi-check/previous-nss-release` #Reads the version number of the last release from the respective file
+ NSS_CLONE_RESULT=0
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -u "${BASE_NSS}" "https://hg.mozilla.org/projects/nss" "${HGDIR}/baseline/nss"
+ if [ $? -eq 0 ]; then
+ NSS_CLONE_RESULT=0
+ break
+ fi
+ rm -rf "${HGDIR}/baseline/nss"
+ NSS_CLONE_RESULT=1
+ done
+ if [ ${NSS_CLONE_RESULT} -ne 0 ]; then
+ echo "invalid tag in automation/abi-check/previous-nss-release"
+ return 1
+ fi
+
+ BASE_NSPR=NSPR_$(head -1 ${HGDIR}/baseline/nss/automation/release/nspr-version.txt | cut -d . -f 1-2 | tr . _)_BRANCH
+ hg clone -u "${BASE_NSPR}" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/baseline/nspr"
+ NSPR_CLONE_RESULT=$?
+
+ if [ ${NSPR_CLONE_RESULT} -ne 0 ]; then
+ rm -rf "${HGDIR}/baseline/nspr"
+ for i in 0 2 5; do
+ sleep $i
+ hg clone -u "default" "https://hg.mozilla.org/projects/nspr" "${HGDIR}/baseline/nspr" && break
+ rm -rf "${HGDIR}/baseline/nspr"
+ done
+        echo "Nonexistent tag ${BASE_NSPR} derived from ${BASE_NSS} automation/release/nspr-version.txt"
+ echo "Using default branch instead."
+ fi
+
+ echo "######## building baseline NSPR/NSS ########"
+ echo "${HGDIR}/baseline/nss/build.sh"
+ cd ${HGDIR}/baseline/nss
+ ./build.sh -v -c
+ cd ${HGDIR}
+ else # Else nobuild block
+ echo "######## using existing baseline NSPR/NSS build ########"
+ fi # End nobuild block
+
+ set +e #reverses set -e from build.sh to allow abidiff failures
+
+ echo "######## Starting abidiff procedure ########"
+ abi_diff
+}
+
+#Slightly modified from buildbot-slave/build.sh
+abi_diff()
+{
+ ABI_PROBLEM_FOUND=0
+ ABI_REPORT=${OUTPUTDIR}/abi-diff.txt
+ rm -f ${ABI_REPORT}
+ PREVDIST=${HGDIR}/baseline/dist
+ NEWDIST=${HGDIR}/dist
+ ALL_SOs="libfreebl3.so libfreeblpriv3.so libnspr4.so libnss3.so libnssckbi.so libnssdbm3.so libnsssysinit.so libnssutil3.so libplc4.so libplds4.so libsmime3.so libsoftokn3.so libssl3.so"
+ for SO in ${ALL_SOs}; do
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ touch ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt
+ fi
+ abidiff --hd1 $PREVDIST/public/ --hd2 $NEWDIST/public \
+ $PREVDIST/*/lib/$SO $NEWDIST/*/lib/$SO \
+ > ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+ RET=$?
+ cat ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt \
+ | grep -v "^Functions changes summary:" \
+ | grep -v "^Variables changes summary:" \
+ > ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt
+ rm -f ${HGDIR}/nss/automation/abi-check/new-report-temp$SO.txt
+
+ ABIDIFF_ERROR=$((($RET & 0x01) != 0))
+ ABIDIFF_USAGE_ERROR=$((($RET & 0x02) != 0))
+ ABIDIFF_ABI_CHANGE=$((($RET & 0x04) != 0))
+ ABIDIFF_ABI_INCOMPATIBLE_CHANGE=$((($RET & 0x08) != 0))
+ ABIDIFF_UNKNOWN_BIT_SET=$((($RET & 0xf0) != 0))
+
+ # If abidiff reports an error, or a usage error, or if it sets a result
+ # bit value this script doesn't know yet about, we'll report failure.
+ # For ABI changes, we don't yet report an error. We'll compare the
+ # result report with our whitelist. This allows us to silence changes
+ # that we're already aware of and have been declared acceptable.
+
+ REPORT_RET_AS_FAILURE=0
+ if [ $ABIDIFF_ERROR -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_USAGE_ERROR -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_USAGE_ERROR."
+ REPORT_RET_AS_FAILURE=1
+ fi
+ if [ $ABIDIFF_UNKNOWN_BIT_SET -ne 0 ]; then
+ echo "abidiff reported ABIDIFF_UNKNOWN_BIT_SET."
+ REPORT_RET_AS_FAILURE=1
+ fi
+
+ if [ $ABIDIFF_ABI_CHANGE -ne 0 ]; then
+        echo "Ignoring abidiff result ABIDIFF_ABI_CHANGE; instead we'll check for non-whitelisted differences."
+ fi
+ if [ $ABIDIFF_ABI_INCOMPATIBLE_CHANGE -ne 0 ]; then
+        echo "Ignoring abidiff result ABIDIFF_ABI_INCOMPATIBLE_CHANGE; instead we'll check for non-whitelisted differences."
+ fi
+
+ if [ $REPORT_RET_AS_FAILURE -ne 0 ]; then
+ ABI_PROBLEM_FOUND=1
+ echo "abidiff {$PREVDIST , $NEWDIST} for $SO FAILED with result $RET, or failed writing to ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
+ if [ ! -f ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt ]; then
+ ABI_PROBLEM_FOUND=1
+ echo "FAILED to access report file: ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt"
+ fi
+
+ diff -wB -u ${HGDIR}/nss/automation/abi-check/expected-report-$SO.txt \
+ ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt >> ${ABI_REPORT}
+ if [ ! -f ${ABI_REPORT} ]; then
+ ABI_PROBLEM_FOUND=1
+        echo "FAILED to compare expected and new report: ${HGDIR}/nss/automation/abi-check/new-report-$SO.txt"
+ fi
+ done
+
+ if [ -s ${ABI_REPORT} ]; then
+ echo "FAILED: there are new unexpected ABI changes"
+ cat ${ABI_REPORT}
+ return 1
+ elif [ $ABI_PROBLEM_FOUND -ne 0 ]; then
+ echo "FAILED: failure executing the ABI checks"
+ cat ${ABI_REPORT}
+ return 1
+ fi
+
+ return 0
+}
+
+check_abi $1
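For reference, abidiff reports its result as a bit field, and the masks tested in abi_diff() above (0x01, 0x02, 0x04, 0x08, 0xf0) correspond to libabigail's ABIDIFF_* bits. A minimal sketch of decoding such a return value; decode_abidiff_ret is a hypothetical helper shown for illustration only and is not part of this patch:

decode_abidiff_ret() {
    # Mirror the bit masks used in abi_diff() above.
    local ret=$1
    (( ret & 0x01 )) && echo "ABIDIFF_ERROR"
    (( ret & 0x02 )) && echo "ABIDIFF_USAGE_ERROR"
    (( ret & 0x04 )) && echo "ABIDIFF_ABI_CHANGE"
    (( ret & 0x08 )) && echo "ABIDIFF_ABI_INCOMPATIBLE_CHANGE"
    (( ret & 0xf0 )) && echo "unknown result bit set"
    return 0
}

# Example: a return value of 12 (0x04 | 0x08) sets both ABI change bits,
# which abi_diff() tolerates and then checks against the expected-report whitelist.
decode_abidiff_ret 12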
diff --git a/security/nss/automation/taskcluster/scripts/gen_certs.sh b/security/nss/automation/taskcluster/scripts/gen_certs.sh
index b8d4f60ba..c03db7e9c 100755
--- a/security/nss/automation/taskcluster/scripts/gen_certs.sh
+++ b/security/nss/automation/taskcluster/scripts/gen_certs.sh
@@ -12,5 +12,10 @@ NSS_TESTS=cert NSS_CYCLES="standard pkix sharedb" $(dirname $0)/run_tests.sh
echo 1 > tests_results/security/localhost
# Package.
-mkdir artifacts
-tar cvfjh artifacts/dist.tar.bz2 dist tests_results
+if [[ $(uname) = "Darwin" ]]; then
+ mkdir -p public
+ tar cvfjh public/dist.tar.bz2 dist tests_results
+else
+ mkdir artifacts
+ tar cvfjh artifacts/dist.tar.bz2 dist tests_results
+fi
diff --git a/security/nss/automation/taskcluster/scripts/run_hacl.sh b/security/nss/automation/taskcluster/scripts/run_hacl.sh
new file mode 100644
index 000000000..281075eef
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/run_hacl.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+if [[ $(id -u) -eq 0 ]]; then
+ # Drop privileges by re-running this script.
+    # Note: this mangles arguments; it's better to avoid running scripts as root.
+ exec su worker -c "$0 $*"
+fi
+
+set -e -x -v
+
+# The docker image this is running in has the HACL* and NSS sources.
+# The extracted C code from HACL* is already generated and the HACL* tests were
+# successfully executed.
+
+# Verify Poly1305 (doesn't work in docker image build)
+make verify -C ~/hacl-star/code/poly1305 -j$(nproc)
+
+# Add license header to specs
+spec_files=($(find ~/hacl-star/specs -type f -name '*.fst'))
+for f in "${spec_files[@]}"; do
+ cat /tmp/license.txt "$f" > /tmp/tmpfile && mv /tmp/tmpfile "$f"
+done
+
+# Format the extracted C code.
+cd ~/hacl-star/snapshots/nss
+cp ~/nss/.clang-format .
+find . -type f -name '*.[ch]' -exec clang-format -i {} \+
+
+# These diff commands will return 1 if there are differences and stop the script.
+files=($(find ~/nss/lib/freebl/verified/ -type f -name '*.[ch]'))
+for f in "${files[@]}"; do
+ diff $f $(basename "$f")
+done
+
+# Check that the specs didn't change either.
+cd ~/hacl-star/specs
+files=($(find ~/nss/lib/freebl/verified/specs -type f))
+for f in "${files[@]}"; do
+ diff $f $(basename "$f")
+done
diff --git a/security/nss/automation/taskcluster/scripts/run_saw.sh b/security/nss/automation/taskcluster/scripts/run_saw.sh
new file mode 100644
index 000000000..0e9a8224a
--- /dev/null
+++ b/security/nss/automation/taskcluster/scripts/run_saw.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+source $(dirname "$0")/tools.sh
+
+# Fetch artifact if needed.
+fetch_dist
+
+# Run SAW.
+saw "nss/automation/saw/$1.saw"
diff --git a/security/nss/automation/taskcluster/scripts/run_scan_build.sh b/security/nss/automation/taskcluster/scripts/run_scan_build.sh
index 4024c226e..014530b42 100755
--- a/security/nss/automation/taskcluster/scripts/run_scan_build.sh
+++ b/security/nss/automation/taskcluster/scripts/run_scan_build.sh
@@ -34,7 +34,7 @@ for i in "${!scan[@]}"; do
done
# run scan-build (only building affected directories)
-scan-build -o /home/worker/artifacts --use-cc=$CC --use-c++=$CCC make nss_build_all && cd ..
+scan-build-5.0 -o /home/worker/artifacts --use-cc=$CC --use-c++=$CCC make nss_build_all && cd ..
# print errors we found
set +v +x
diff --git a/security/nss/automation/taskcluster/scripts/split.sh b/security/nss/automation/taskcluster/scripts/split.sh
index 4d18385ec..fded64e1b 100644
--- a/security/nss/automation/taskcluster/scripts/split.sh
+++ b/security/nss/automation/taskcluster/scripts/split.sh
@@ -23,16 +23,10 @@ split_util() {
# Copy everything.
cp -R $nssdir $dstdir
- # Skip gtests when building.
- sed '/^DIRS = /s/ cpputil gtests$//' $nssdir/manifest.mn > $dstdir/manifest.mn-t && mv $dstdir/manifest.mn-t $dstdir/manifest.mn
-
# Remove subdirectories that we don't want.
rm -rf $dstdir/cmd
- rm -rf $dstdir/tests
rm -rf $dstdir/lib
rm -rf $dstdir/automation
- rm -rf $dstdir/gtests
- rm -rf $dstdir/cpputil
rm -rf $dstdir/doc
# Start with an empty cmd lib directories to be filled selectively.
diff --git a/security/nss/automation/taskcluster/windows/releng.manifest b/security/nss/automation/taskcluster/windows/releng.manifest
index 68d2c1d9e..d571c544d 100644
--- a/security/nss/automation/taskcluster/windows/releng.manifest
+++ b/security/nss/automation/taskcluster/windows/releng.manifest
@@ -1,10 +1,10 @@
[
{
- "version": "Visual Studio 2015 Update 3 14.0.25425.01 / SDK 10.0.14393.0",
- "size": 326656969,
- "digest": "babc414ffc0457d27f5a1ed24a8e4873afbe2f1c1a4075469a27c005e1babc3b2a788f643f825efedff95b79686664c67ec4340ed535487168a3482e68559bc7",
+ "version": "Visual Studio 2017 15.4.2 / SDK 10.0.15063.0",
+ "size": 303146863,
+ "digest": "18700889e6b5e81613b9cf57ce4e0d46a6ee45bb4c5c33bae2604a5275326128775b8a032a1eb178c5db973746d565340c4e36d98375789e1d5bd836ab16ba58",
"algorithm": "sha512",
- "filename": "vs2015u3.zip",
+ "filename": "vs2017_15.4.2.zip",
"unpack": true
},
{
diff --git a/security/nss/automation/taskcluster/windows/setup.sh b/security/nss/automation/taskcluster/windows/setup.sh
index 7def50db4..36a040ba1 100644
--- a/security/nss/automation/taskcluster/windows/setup.sh
+++ b/security/nss/automation/taskcluster/windows/setup.sh
@@ -2,12 +2,12 @@
set -v -e -x
-export VSPATH="$(pwd)/vs2015u3"
+export VSPATH="$(pwd)/vs2017_15.4.2"
export NINJA_PATH="$(pwd)/ninja/bin"
export WINDOWSSDKDIR="${VSPATH}/SDK"
export VS90COMNTOOLS="${VSPATH}/VC"
-export INCLUDE="${VSPATH}/VC/include:${VSPATH}/SDK/Include/10.0.14393.0/ucrt:${VSPATH}/SDK/Include/10.0.14393.0/shared:${VSPATH}/SDK/Include/10.0.14393.0/um"
+export INCLUDE="${VSPATH}/VC/include:${VSPATH}/SDK/Include/10.0.15063.0/ucrt:${VSPATH}/SDK/Include/10.0.15063.0/shared:${VSPATH}/SDK/Include/10.0.15063.0/um"
# Usage: hg_clone repo dir [revision=@]
hg_clone() {
@@ -23,4 +23,4 @@ hg_clone() {
}
hg_clone https://hg.mozilla.org/build/tools tools default
-tools/scripts/tooltool/tooltool_wrapper.sh $(dirname $0)/releng.manifest https://api.pub.build.mozilla.org/tooltool/ non-existant-file.sh /c/mozilla-build/python/python.exe /c/builds/tooltool.py --authentication-file /c/builds/relengapi.tok -c /c/builds/tooltool_cache
+tools/scripts/tooltool/tooltool_wrapper.sh $(dirname $0)/releng.manifest https://tooltool.mozilla-releng.net/ non-existant-file.sh /c/mozilla-build/python/python.exe /c/builds/tooltool.py --authentication-file /c/builds/relengapi.tok -c /c/builds/tooltool_cache
diff --git a/security/nss/automation/taskcluster/windows/setup32.sh b/security/nss/automation/taskcluster/windows/setup32.sh
index bcddabfa3..19bed284d 100644
--- a/security/nss/automation/taskcluster/windows/setup32.sh
+++ b/security/nss/automation/taskcluster/windows/setup32.sh
@@ -4,7 +4,7 @@ set -v -e -x
source $(dirname $0)/setup.sh
-export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x86/Microsoft.VC140.CRT"
+export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x86/Microsoft.VC141.CRT"
export WIN_UCRT_REDIST_DIR="${VSPATH}/SDK/Redist/ucrt/DLLs/x86"
-export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/amd64_x86:${VSPATH}/VC/bin/amd64:${VSPATH}/VC/bin:${VSPATH}/SDK/bin/x86:${VSPATH}/SDK/bin/x64:${VSPATH}/VC/redist/x86/Microsoft.VC140.CRT:${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x86:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
-export LIB="${VSPATH}/VC/lib:${VSPATH}/SDK/lib/10.0.14393.0/ucrt/x86:${VSPATH}/SDK/lib/10.0.14393.0/um/x86"
+export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/Hostx64/x86:${VSPATH}/VC/bin/Hostx64/x64:${VSPATH}/VC/Hostx86/x86:${VSPATH}/SDK/bin/10.0.15063.0/x64:${VSPATH}/VC/redist/x86/Microsoft.VC141.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x86:${PATH}"
+export LIB="${VSPATH}/VC/lib/x86:${VSPATH}/SDK/lib/10.0.15063.0/ucrt/x86:${VSPATH}/SDK/lib/10.0.15063.0/um/x86"
diff --git a/security/nss/automation/taskcluster/windows/setup64.sh b/security/nss/automation/taskcluster/windows/setup64.sh
index f308298c1..d16cb0ec9 100644
--- a/security/nss/automation/taskcluster/windows/setup64.sh
+++ b/security/nss/automation/taskcluster/windows/setup64.sh
@@ -4,7 +4,7 @@ set -v -e -x
source $(dirname $0)/setup.sh
-export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT"
+export WIN32_REDIST_DIR="${VSPATH}/VC/redist/x64/Microsoft.VC141.CRT"
export WIN_UCRT_REDIST_DIR="${VSPATH}/SDK/Redist/ucrt/DLLs/x64"
-export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/amd64:${VSPATH}/VC/bin:${VSPATH}/SDK/bin/x64:${VSPATH}/VC/redist/x64/Microsoft.VC140.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
-export LIB="${VSPATH}/VC/lib/amd64:${VSPATH}/SDK/lib/10.0.14393.0/ucrt/x64:${VSPATH}/SDK/lib/10.0.14393.0/um/x64"
+export PATH="${NINJA_PATH}:${VSPATH}/VC/bin/Hostx64/x64:${VSPATH}/VC/bin/Hostx86/x86:${VSPATH}/SDK/bin/10.0.15063.0/x64:${VSPATH}/VC/redist/x64/Microsoft.VC141.CRT:${VSPATH}/SDK/Redist/ucrt/DLLs/x64:${PATH}"
+export LIB="${VSPATH}/VC/lib/x64:${VSPATH}/SDK/lib/10.0.15063.0/ucrt/x64:${VSPATH}/SDK/lib/10.0.15063.0/um/x64"