summaryrefslogtreecommitdiffstats
path: root/security/nss/automation/taskcluster/graph/src
diff options
context:
space:
mode:
Diffstat (limited to 'security/nss/automation/taskcluster/graph/src')
-rw-r--r--security/nss/automation/taskcluster/graph/src/context_hash.js43
-rw-r--r--security/nss/automation/taskcluster/graph/src/extend.js572
-rw-r--r--security/nss/automation/taskcluster/graph/src/image_builder.js62
-rw-r--r--security/nss/automation/taskcluster/graph/src/index.js14
-rw-r--r--security/nss/automation/taskcluster/graph/src/merge.js10
-rw-r--r--security/nss/automation/taskcluster/graph/src/queue.js242
-rw-r--r--security/nss/automation/taskcluster/graph/src/try_syntax.js159
7 files changed, 1102 insertions, 0 deletions
diff --git a/security/nss/automation/taskcluster/graph/src/context_hash.js b/security/nss/automation/taskcluster/graph/src/context_hash.js
new file mode 100644
index 000000000..f0a2e9a88
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/context_hash.js
@@ -0,0 +1,43 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import fs from "fs";
+import path from "path";
+import crypto from "crypto";
+import flatmap from "flatmap";
+
// Return the hex-encoded SHA-256 digest of the given data.
function sha256(data) {
  return crypto.createHash("sha256").update(data).digest("hex");
}
+
// Recursively collect a list of all files (not directories) beneath the
// given directory. Paths are returned joined onto |dir|.
// NOTE: the third-party "flatmap" dependency was unnecessary; a plain
// reduce/concat over fs.readdirSync does the same job with the stdlib.
function collectFilesInDirectory(dir) {
  return fs.readdirSync(dir).reduce((files, entry) => {
    let entry_path = path.join(dir, entry);

    // Recurse into subdirectories; everything else counts as a file.
    if (fs.lstatSync(entry_path).isDirectory()) {
      return files.concat(collectFilesInDirectory(entry_path));
    }

    return files.concat([entry_path]);
  }, []);
}
+
// Compute a context hash for the given context path (relative to the
// repository root four levels above this file).
export default function (context_path) {
  const root = path.join(__dirname, "../../../..");
  const dir = path.join(root, context_path);

  // Hash "path|contents" per file so renames change the hash too.
  const hashes = collectFilesInDirectory(dir)
    .sort()
    .map(file => sha256(file + "|" + fs.readFileSync(file, "utf-8")));

  // Generate a new prefix every month to ensure the image stays buildable.
  const now = new Date();
  const prefix = `${now.getUTCFullYear()}-${now.getUTCMonth() + 1}:`;
  return sha256(prefix + hashes.join(","));
}
diff --git a/security/nss/automation/taskcluster/graph/src/extend.js b/security/nss/automation/taskcluster/graph/src/extend.js
new file mode 100644
index 000000000..a6a8fcbe2
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/extend.js
@@ -0,0 +1,572 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import merge from "./merge";
+import * as queue from "./queue";
+
// Docker image used for Linux builds/tests, built from the context
// directory automation/taskcluster/docker.
const LINUX_IMAGE = {name: "linux", path: "automation/taskcluster/docker"};

// hg clone with two retries (2s, then 5s back-off) so a transient
// network hiccup on the Windows workers doesn't fail the whole task.
const WINDOWS_CHECKOUT_CMD =
  "bash -c \"hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss || " +
  "(sleep 2; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss) || " +
  "(sleep 5; hg clone -r $NSS_HEAD_REVISION $NSS_HEAD_REPOSITORY nss)\"";
+
+/*****************************************************************************/
+
// Drop tasks that shouldn't run on certain platforms/collections.
queue.filter(task => {
  if (task.group == "Builds") {
    // Remove extra builds on {A,UB}San and ARM.
    let coll = task.collection;
    if (coll == "asan" || coll == "gyp-asan" || coll == "arm-debug") {
      return false;
    }

    // Remove extra builds w/o libpkix for non-linux64-debug.
    if (task.symbol == "noLibpkix") {
      if (task.platform != "linux64" || task.collection != "debug") {
        return false;
      }
    }
  }

  // No BoGo tests on Windows or ARM.
  if (task.tests == "bogo") {
    if (task.platform == "windows2012-64" || task.collection == "arm-debug") {
      return false;
    }
  }

  // GYP builds with -Ddisable_libpkix=1 by default.
  let isGyp = task.collection == "gyp" || task.collection == "gyp-asan";
  if (isGyp && task.tests == "chains") {
    return false;
  }

  return true;
});
+
// Tweak individual tasks (env vars, run times) before submission.
queue.map(task => {
  // Make sure the env object exists before anything writes to it.
  // (Fixes a latent TypeError: ASAN_OPTIONS used to be assigned below
  // before task.env was initialized.)
  task.env = task.env || {};

  if (task.collection == "asan" || task.collection == "gyp-asan") {
    // CRMF and FIPS tests still leak, unfortunately.
    if (task.tests == "crmf" || task.tests == "fips") {
      task.env.ASAN_OPTIONS = "detect_leaks=0";
    }
  }

  if (task.collection == "arm-debug") {
    // These tests take quite some time on our poor ARM devices.
    if (task.tests == "chains" || (task.tests == "ssl" && task.cycle == "standard")) {
      task.maxRunTime = 14400;
    }
  }

  // Windows is slow.
  if (task.platform == "windows2012-64" && task.tests == "chains") {
    task.maxRunTime = 7200;
  }

  // Enable TLS 1.3 for every task.
  task.env.NSS_ENABLE_TLS_1_3 = "1";

  return task;
});
+
+/*****************************************************************************/
+
// Entry point for the decision task: schedule every build, test, and
// tool task that makes up the graph. Each schedule*() helper submits
// its own batch of tasks via queue.submit().
export default async function main() {
  await scheduleLinux("Linux 32 (opt)", {
    env: {BUILD_OPT: "1"},
    platform: "linux32",
    image: LINUX_IMAGE
  });

  await scheduleLinux("Linux 32 (debug)", {
    platform: "linux32",
    collection: "debug",
    image: LINUX_IMAGE
  });

  await scheduleLinux("Linux 64 (opt)", {
    env: {USE_64: "1", BUILD_OPT: "1"},
    platform: "linux64",
    image: LINUX_IMAGE
  });

  await scheduleLinux("Linux 64 (debug)", {
    env: {USE_64: "1"},
    platform: "linux64",
    collection: "debug",
    image: LINUX_IMAGE
  });

  // GYP-based builds override the default build command.
  await scheduleLinux("Linux 64 (debug, gyp)", {
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh"
    ],
    platform: "linux64",
    collection: "gyp",
    image: LINUX_IMAGE
  });

  await scheduleLinux("Linux 64 (debug, gyp, asan, ubsan)", {
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh -g -v --ubsan --asan"
    ],
    env: {
      ASAN_OPTIONS: "detect_odr_violation=0", // bug 1316276
      UBSAN_OPTIONS: "print_stacktrace=1",
      NSS_DISABLE_ARENA_FREE_LIST: "1",
      NSS_DISABLE_UNLOAD: "1",
      CC: "clang",
      CCC: "clang++"
    },
    platform: "linux64",
    collection: "gyp-asan",
    image: LINUX_IMAGE
  });

  await scheduleLinux("Linux 64 (ASan, debug)", {
    env: {
      UBSAN_OPTIONS: "print_stacktrace=1",
      NSS_DISABLE_ARENA_FREE_LIST: "1",
      NSS_DISABLE_UNLOAD: "1",
      CC: "clang",
      CCC: "clang++",
      USE_UBSAN: "1",
      USE_ASAN: "1",
      USE_64: "1"
    },
    platform: "linux64",
    collection: "asan",
    image: LINUX_IMAGE
  });

  await scheduleWindows("Windows 2012 64 (opt)", {
    env: {BUILD_OPT: "1"}
  });

  await scheduleWindows("Windows 2012 64 (debug)", {
    collection: "debug"
  });

  await scheduleFuzzing();

  await scheduleTestBuilds();

  await scheduleTools();

  // ARM runs on self-hosted workers (localprovisioner / nss-rpi) at
  // tier 3, with a raised maxRunTime.
  await scheduleLinux("Linux 32 (ARM, debug)", {
    image: "franziskus/nss-arm-ci",
    provisioner: "localprovisioner",
    collection: "arm-debug",
    workerType: "nss-rpi",
    platform: "linux32",
    maxRunTime: 7200,
    tier: 3
  });
}
+
+/*****************************************************************************/
+
// Schedule a Linux build with the given display name and base task
// definition, plus its certificates task, the full test suite, and a
// set of extra builds (alternate compilers, no-libpkix).
async function scheduleLinux(name, base) {
  // Build base definition.
  let build_base = merge({
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/build.sh"
    ],
    artifacts: {
      public: {
        expires: 24 * 7, // hours; converted to a timestamp at submission
        type: "directory",
        path: "/home/worker/artifacts"
      }
    },
    kind: "build",
    symbol: "B"
  }, base);

  // The task that builds NSPR+NSS.
  let task_build = queue.scheduleTask(merge(build_base, {name}));

  // The task that generates certificates.
  let task_cert = queue.scheduleTask(merge(build_base, {
    name: "Certificates",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/gen_certs.sh"
    ],
    parent: task_build,
    symbol: "Certs"
  }));

  // Schedule tests.
  scheduleTests(task_build, task_cert, merge(base, {
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
    ]
  }));

  // Extra builds (grouped under "Builds" in treeherder).
  let extra_base = merge({group: "Builds"}, build_base);
  queue.scheduleTask(merge(extra_base, {
    name: `${name} w/ clang-3.9`,
    env: {
      CC: "clang",
      CCC: "clang++",
    },
    symbol: "clang-3.9"
  }));

  queue.scheduleTask(merge(extra_base, {
    name: `${name} w/ gcc-4.8`,
    env: {
      CC: "gcc-4.8",
      CCC: "g++-4.8"
    },
    symbol: "gcc-4.8"
  }));

  queue.scheduleTask(merge(extra_base, {
    name: `${name} w/ gcc-6.1`,
    env: {
      CC: "gcc-6",
      CCC: "g++-6"
    },
    symbol: "gcc-6.1"
  }));

  queue.scheduleTask(merge(extra_base, {
    name: `${name} w/ NSS_DISABLE_LIBPKIX=1`,
    env: {NSS_DISABLE_LIBPKIX: "1"},
    symbol: "noLibpkix"
  }));

  return queue.submit();
}
+
+/*****************************************************************************/
+
// Schedule the fuzzing build (gyp -g -v --fuzz) and its test tasks:
// fuzzing-related gtests plus libFuzzer runs for cert and spki.
async function scheduleFuzzing() {
  let base = {
    env: {
      // bug 1316276
      ASAN_OPTIONS: "allocator_may_return_null=1:detect_odr_violation=0",
      UBSAN_OPTIONS: "print_stacktrace=1",
      NSS_DISABLE_ARENA_FREE_LIST: "1",
      NSS_DISABLE_UNLOAD: "1",
      CC: "clang",
      CCC: "clang++"
    },
    platform: "linux64",
    collection: "fuzz",
    image: LINUX_IMAGE
  };

  // Build base definition.
  let build_base = merge({
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && " +
      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --fuzz"
    ],
    artifacts: {
      public: {
        expires: 24 * 7, // hours
        type: "directory",
        path: "/home/worker/artifacts"
      }
    },
    kind: "build",
    symbol: "B"
  }, base);

  // The task that builds NSPR+NSS.
  let task_build = queue.scheduleTask(merge(build_base, {
    name: "Linux x64 (debug, fuzz)"
  }));

  // Schedule tests.
  queue.scheduleTask(merge(base, {
    parent: task_build,
    name: "Gtests",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
    ],
    env: {GTESTFILTER: "*Fuzz*"}, // restrict to fuzzing-related gtests
    tests: "ssl_gtests gtests",
    cycle: "standard",
    symbol: "Gtest",
    kind: "test"
  }));

  queue.scheduleTask(merge(base, {
    parent: task_build,
    name: "Cert",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/fuzz.sh " +
      "cert nss/fuzz/corpus/cert -max_total_time=300"
    ],
    // Need a privileged docker container to remove this.
    env: {ASAN_OPTIONS: "detect_leaks=0"},
    symbol: "SCert",
    kind: "test"
  }));

  queue.scheduleTask(merge(base, {
    parent: task_build,
    name: "SPKI",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/fuzz.sh " +
      "spki nss/fuzz/corpus/spki -max_total_time=300"
    ],
    // Need a privileged docker container to remove this.
    env: {ASAN_OPTIONS: "detect_leaks=0"},
    symbol: "SPKI",
    kind: "test"
  }));

  return queue.submit();
}
+
+/*****************************************************************************/
+
// Schedule the gyp --test build and the test tasks (mpi) that run
// against it, grouped under "Test" in treeherder.
async function scheduleTestBuilds() {
  let base = {
    platform: "linux64",
    collection: "gyp",
    group: "Test",
    image: LINUX_IMAGE
  };

  // Build base definition.
  let build = merge({
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && " +
      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --test"
    ],
    artifacts: {
      public: {
        expires: 24 * 7, // hours
        type: "directory",
        path: "/home/worker/artifacts"
      }
    },
    kind: "build",
    symbol: "B",
    name: "Linux 64 (debug, gyp, test)"
  }, base);

  // The task that builds NSPR+NSS.
  let task_build = queue.scheduleTask(build);

  // Schedule tests.
  queue.scheduleTask(merge(base, {
    parent: task_build,
    name: "mpi",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
    ],
    tests: "mpi",
    cycle: "standard",
    symbol: "mpi",
    kind: "test"
  }));

  return queue.submit();
}
+
+
+/*****************************************************************************/
+
// Schedule a Windows 2012 build with the given display name and base
// definition, plus its certificates task and the full test suite.
async function scheduleWindows(name, base) {
  base = merge(base, {
    workerType: "nss-win2012r2",
    platform: "windows2012-64",
    env: {
      // Full mozilla-build toolchain PATH for the Windows workers.
      PATH: "c:\\mozilla-build\\python;c:\\mozilla-build\\msys\\local\\bin;" +
            "c:\\mozilla-build\\7zip;c:\\mozilla-build\\info-zip;" +
            "c:\\mozilla-build\\python\\Scripts;c:\\mozilla-build\\yasm;" +
            "c:\\mozilla-build\\msys\\bin;c:\\Windows\\system32;" +
            "c:\\mozilla-build\\upx391w;c:\\mozilla-build\\moztools-x64\\bin;" +
            "c:\\mozilla-build\\wget",
      DOMSUF: "localdomain",
      HOST: "localhost",
      USE_64: "1"
    }
  });

  // Build base definition.
  let build_base = merge(base, {
    command: [
      WINDOWS_CHECKOUT_CMD,
      "bash -c nss/automation/taskcluster/windows/build.sh"
    ],
    artifacts: [{
      expires: 24 * 7, // hours
      type: "directory",
      path: "public\\build"
    }],
    kind: "build",
    symbol: "B"
  });

  // The task that builds NSPR+NSS.
  let task_build = queue.scheduleTask(merge(build_base, {name}));

  // The task that generates certificates.
  let task_cert = queue.scheduleTask(merge(build_base, {
    name: "Certificates",
    command: [
      WINDOWS_CHECKOUT_CMD,
      "bash -c nss/automation/taskcluster/windows/gen_certs.sh"
    ],
    parent: task_build,
    symbol: "Certs"
  }));

  // Schedule tests.
  scheduleTests(task_build, task_cert, merge(base, {
    command: [
      WINDOWS_CHECKOUT_CMD,
      "bash -c nss/automation/taskcluster/windows/run_tests.sh"
    ]
  }));

  return queue.submit();
}
+
+/*****************************************************************************/
+
// Schedule the full test suite against the given build and certificate
// tasks. Tests that don't need certificates depend on task_build only;
// the rest (including all SSL cycles) depend on task_cert.
function scheduleTests(task_build, task_cert, test_base) {
  test_base = merge({kind: "test"}, test_base);

  // Schedule tests that do NOT need certificates.
  let no_cert_base = merge(test_base, {parent: task_build});
  queue.scheduleTask(merge(no_cert_base, {
    name: "Gtests", symbol: "Gtest", tests: "ssl_gtests gtests", cycle: "standard"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "Bogo tests", symbol: "Bogo", tests: "bogo", cycle: "standard"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "Chains tests", symbol: "Chains", tests: "chains"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "Cipher tests", symbol: "Cipher", tests: "cipher"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "EC tests", symbol: "EC", tests: "ec"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "Lowhash tests", symbol: "Lowhash", tests: "lowhash"
  }));
  queue.scheduleTask(merge(no_cert_base, {
    name: "SDR tests", symbol: "SDR", tests: "sdr"
  }));

  // Schedule tests that need certificates.
  let cert_base = merge(test_base, {parent: task_cert});
  queue.scheduleTask(merge(cert_base, {
    name: "CRMF tests", symbol: "CRMF", tests: "crmf"
  }));
  queue.scheduleTask(merge(cert_base, {
    name: "DB tests", symbol: "DB", tests: "dbtests"
  }));
  queue.scheduleTask(merge(cert_base, {
    name: "FIPS tests", symbol: "FIPS", tests: "fips"
  }));
  queue.scheduleTask(merge(cert_base, {
    name: "Merge tests", symbol: "Merge", tests: "merge"
  }));
  queue.scheduleTask(merge(cert_base, {
    name: "S/MIME tests", symbol: "SMIME", tests: "smime"
  }));
  queue.scheduleTask(merge(cert_base, {
    name: "Tools tests", symbol: "Tools", tests: "tools"
  }));

  // SSL tests, need certificates too. One task per test cycle, grouped
  // under "SSL" in treeherder.
  let ssl_base = merge(cert_base, {tests: "ssl", group: "SSL"});
  queue.scheduleTask(merge(ssl_base, {
    name: "SSL tests (standard)", symbol: "standard", cycle: "standard"
  }));
  queue.scheduleTask(merge(ssl_base, {
    name: "SSL tests (pkix)", symbol: "pkix", cycle: "pkix"
  }));
  queue.scheduleTask(merge(ssl_base, {
    name: "SSL tests (sharedb)", symbol: "sharedb", cycle: "sharedb"
  }));
  queue.scheduleTask(merge(ssl_base, {
    name: "SSL tests (upgradedb)", symbol: "upgradedb", cycle: "upgradedb"
  }));
}
+
+/*****************************************************************************/
+
// Schedule the tool tasks: a clang-format check and a scan-build static
// analysis run. Neither depends on a build task.
async function scheduleTools() {
  let base = {
    image: LINUX_IMAGE,
    platform: "nss-tools",
    kind: "test"
  };

  queue.scheduleTask(merge(base, {
    symbol: "clang-format-3.9",
    name: "clang-format-3.9",
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_clang_format.sh"
    ]
  }));

  queue.scheduleTask(merge(base, {
    symbol: "scan-build-3.9",
    name: "scan-build-3.9",
    env: {
      USE_64: "1",
      CC: "clang",
      CCC: "clang++",
    },
    artifacts: {
      public: {
        expires: 24 * 7, // hours
        type: "directory",
        path: "/home/worker/artifacts"
      }
    },
    command: [
      "/bin/bash",
      "-c",
      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_scan_build.sh"
    ]
  }));

  return queue.submit();
}
diff --git a/security/nss/automation/taskcluster/graph/src/image_builder.js b/security/nss/automation/taskcluster/graph/src/image_builder.js
new file mode 100644
index 000000000..bc90e0242
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/image_builder.js
@@ -0,0 +1,62 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import * as queue from "./queue";
+import context_hash from "./context_hash";
+import taskcluster from "taskcluster-client";
+
// Check whether the given task published a "public/image.tar" artifact.
// NOTE: renamed the local from |queue| to |queue_client| — it shadowed
// the "./queue" module imported at the top of this file.
async function taskHasImageArtifact(taskId) {
  let queue_client = new taskcluster.Queue();
  let {artifacts} = await queue_client.listLatestArtifacts(taskId);
  return artifacts.some(artifact => artifact.name == "public/image.tar");
}
+
// Look up the task indexed under the given namespace; return its taskId
// when it carries an image artifact, null otherwise.
async function findTaskWithImageArtifact(ns) {
  let index = new taskcluster.Index();
  let found = await index.findTask(ns);
  return (await taskHasImageArtifact(found.taskId)) ? found.taskId : null;
}
+
// Find a previously built docker image task for {name, path}, keyed by
// the current context hash. Resolves to null when none exists (or any
// lookup error occurs).
export async function findTask({name, path}) {
  let hash = await context_hash(path);
  let namespace = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;

  try {
    return await findTaskWithImageArtifact(namespace);
  } catch (e) {
    return null;
  }
}
+
// Build the task definition for an image-builder task that creates the
// docker image for {name, path} and indexes it under the context hash
// so findTask() can locate it later.
export async function buildTask({name, path}) {
  let hash = await context_hash(path);
  let ns = `docker.images.v1.${process.env.TC_PROJECT}.${name}.hash.${hash}`;

  return {
    name: "Image Builder",
    image: "taskcluster/image_builder:0.1.5",
    // Register the result in the index under the context-hash namespace.
    routes: ["index." + ns],
    env: {
      HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
      BASE_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
      HEAD_REV: process.env.NSS_HEAD_REVISION,
      HEAD_REF: process.env.NSS_HEAD_REVISION,
      PROJECT: process.env.TC_PROJECT,
      CONTEXT_PATH: path,
      HASH: hash
    },
    artifacts: {
      "public/image.tar": {
        type: "file",
        expires: 24 * 90, // hours
        path: "/artifacts/image.tar"
      }
    },
    command: [
      "/bin/bash",
      "-c",
      "/home/worker/bin/build_image.sh"
    ],
    platform: "nss-decision",
    features: ["dind"], // docker-in-docker, needed to build the image
    kind: "build",
    symbol: "I"
  };
}
diff --git a/security/nss/automation/taskcluster/graph/src/index.js b/security/nss/automation/taskcluster/graph/src/index.js
new file mode 100644
index 000000000..4153e1b18
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/index.js
@@ -0,0 +1,14 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import * as try_syntax from "./try_syntax";
+import extend from "./extend";
+
// Init try syntax filter. Only the nss-try project honors try syntax
// found in the changeset comment.
if (process.env.TC_PROJECT == "nss-try") {
  try_syntax.initFilter();
}

// Extend the task graph. Log (rather than rethrow) any scheduling error.
extend().catch(console.error);
diff --git a/security/nss/automation/taskcluster/graph/src/merge.js b/security/nss/automation/taskcluster/graph/src/merge.js
new file mode 100644
index 000000000..17043dd8e
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/merge.js
@@ -0,0 +1,10 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import {recursive as merge} from "merge";
+
// We always want to clone: passing `true` as the first argument asks
// the "merge" package for a recursive merge into a fresh object, so
// none of the input objects are mutated.
export default function (...args) {
  return merge(true, ...args);
}
diff --git a/security/nss/automation/taskcluster/graph/src/queue.js b/security/nss/automation/taskcluster/graph/src/queue.js
new file mode 100644
index 000000000..2a4a7b3fe
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/queue.js
@@ -0,0 +1,242 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import {clone} from "merge";
+import merge from "./merge";
+import slugid from "slugid";
+import taskcluster from "taskcluster-client";
+import * as image_builder from "./image_builder";
+
// Transform callbacks applied to every task before submission.
let maps = [];
// Predicates; a task is dropped unless every filter returns true.
let filters = [];

// All scheduled task definitions, keyed by their pre-generated taskId.
let tasks = new Map();
// Cache of image-builder lookups, keyed by "name:path".
let image_tasks = new Map();

// Queue client talking to the taskcluster proxy available to the
// decision task.
let queue = new taskcluster.Queue({
  baseUrl: "http://taskcluster/queue/v1"
});
+
// Return an ISO timestamp the given (truncated-to-integer) number of
// hours from now.
function fromNow(hours) {
  let when = new Date();
  when.setHours(when.getHours() + (hours | 0));
  return when.toJSON();
}
+
// Prepend the mandatory treeherder reporting route to the given routes.
function parseRoutes(routes) {
  let treeherder =
    `tc-treeherder.v2.${process.env.TC_PROJECT}.` +
    `${process.env.NSS_HEAD_REVISION}.${process.env.NSS_PUSHLOG_ID}`;
  return [treeherder, ...routes];
}
+
// Convert a list of feature names into the {name: true} map the
// taskcluster payload schema expects.
function parseFeatures(list) {
  let features = {};
  for (let feature of list) {
    features[feature] = true;
  }
  return features;
}
+
// Deep-copy the artifact definitions and turn their relative `expires`
// hour offsets into absolute ISO timestamps.
function parseArtifacts(artifacts) {
  let copy = clone(artifacts);
  for (let key of Object.keys(copy)) {
    copy[key].expires = fromNow(copy[key].expires);
  }
  return copy;
}
+
// Wrap a collection name into the {name: true} object treeherder expects.
function parseCollection(name) {
  return {[name]: true};
}
+
// Build the treeherder configuration section for a task definition.
// group/collection/tier are optional and only emitted when set.
function parseTreeherder(def) {
  let treeherder = {
    build: {platform: def.platform},
    machine: {platform: def.platform},
    symbol: def.symbol,
    jobKind: def.kind
  };

  if (def.group) {
    treeherder.groupSymbol = def.group;
  }
  if (def.collection) {
    treeherder.collection = parseCollection(def.collection);
  }
  if (def.tier) {
    treeherder.tier = def.tier;
  }

  return treeherder;
}
+
// Convert our internal task definition into the payload format expected
// by the taskcluster queue's createTask API.
function convertTask(def) {
  let dependencies = [];

  // Every task inherits the checkout env; def.env may add to/override it.
  let env = merge({
    NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
    NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION
  }, def.env || {});

  // A parent becomes both a hard dependency and an env var the scripts
  // use to fetch the parent's artifacts.
  if (def.parent) {
    dependencies.push(def.parent);
    env.TC_PARENT_TASK_ID = def.parent;
  }

  if (def.tests) {
    env.NSS_TESTS = def.tests;
  }

  if (def.cycle) {
    env.NSS_CYCLES = def.cycle;
  }

  let payload = {
    env,
    command: def.command,
    maxRunTime: def.maxRunTime || 3600 // default: one hour
  };

  if (def.image) {
    payload.image = def.image;
  }

  if (def.features) {
    payload.features = parseFeatures(def.features);
  }

  if (def.artifacts) {
    payload.artifacts = parseArtifacts(def.artifacts);
  }

  return {
    provisionerId: def.provisioner || "aws-provisioner-v1",
    workerType: def.workerType || "hg-worker",
    schedulerId: "task-graph-scheduler",

    created: fromNow(0),
    deadline: fromNow(24),

    dependencies,
    routes: parseRoutes(def.routes || []),

    metadata: {
      name: def.name,
      description: def.name,
      owner: process.env.TC_OWNER,
      source: process.env.TC_SOURCE
    },

    payload,

    extra: {
      treeherder: parseTreeherder(def)
    }
  };
}
+
// Register a transform that is applied to every task before submission.
export function map(fun) {
  maps.push(fun);
}

// Register a predicate; tasks failing any registered filter are dropped.
export function filter(fun) {
  filters.push(fun);
}
+
// Record a task definition for later submission and return its
// pre-generated taskId so dependants can reference it as a parent.
export function scheduleTask(def) {
  let taskId = slugid.v4();
  tasks.set(taskId, merge({}, def)); // clone so later mutations don't leak
  return taskId;
}
+
// Submit all recorded tasks to the taskcluster queue: apply filters and
// maps, resolve docker image references (reusing or scheduling image
// builder tasks), and create each task only after its parent exists.
export async function submit() {
  let promises = new Map();

  for (let [taskId, task] of tasks) {
    // Allow filtering tasks before we schedule them.
    if (!filters.every(filter => filter(task))) {
      continue;
    }

    // Allow changing tasks before we schedule them.
    maps.forEach(map => { task = map(merge({}, task)) });

    let log_id = `${task.name} @ ${task.platform}[${task.collection || "opt"}]`;
    console.log(`+ Submitting ${log_id}.`);

    let parent = task.parent;

    // Convert the task definition.
    task = await convertTask(task);

    // Convert the docker image definition.
    let image_def = task.payload.image;
    if (image_def && image_def.hasOwnProperty("path")) {
      let key = `${image_def.name}:${image_def.path}`;
      let data = {};

      // Check the cache first.
      if (image_tasks.has(key)) {
        data = image_tasks.get(key);
      } else {
        data.taskId = await image_builder.findTask(image_def);
        data.isPending = !data.taskId;

        // No task found.
        if (data.isPending) {
          let image_task = await image_builder.buildTask(image_def);

          // Schedule a new image builder task immediately.
          data.taskId = slugid.v4();

          try {
            await queue.createTask(data.taskId, convertTask(image_task));
          } catch (e) {
            console.error("! FAIL: Scheduling image builder task failed.");
            continue; /* Skip this task on failure. */
          }
        }

        // Store in cache.
        image_tasks.set(key, data);
      }

      // Only depend on the builder when the image isn't built yet.
      if (data.isPending) {
        task.dependencies.push(data.taskId);
      }

      task.payload.image = {
        path: "public/image.tar",
        taskId: data.taskId,
        type: "task-image"
      };
    }

    // Wait for the parent task to be created before scheduling dependants.
    let predecessor = parent ? promises.get(parent) : Promise.resolve();

    promises.set(taskId, predecessor.then(() => {
      // Schedule the task.
      return queue.createTask(taskId, task).catch(err => {
        console.error(`! FAIL: Scheduling ${log_id} failed.`, err);
      });
    }));
  }

  // Wait for all requests to finish.
  // BUG FIX: |promises| is a Map, which has .size, not .length. With
  // .length this block never ran, so submit() resolved before any
  // createTask call finished.
  if (promises.size) {
    await Promise.all([...promises.values()]);
    console.log("=== Total:", promises.size, "tasks. ===");
  }

  tasks.clear();
}
diff --git a/security/nss/automation/taskcluster/graph/src/try_syntax.js b/security/nss/automation/taskcluster/graph/src/try_syntax.js
new file mode 100644
index 000000000..695c9e92f
--- /dev/null
+++ b/security/nss/automation/taskcluster/graph/src/try_syntax.js
@@ -0,0 +1,159 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import * as queue from "./queue";
+import intersect from "intersect";
+import parse_args from "minimist";
+
// Parse try syntax options (e.g. "-b do -p all -u none -t none") into a
// normalized object {builds, platforms, unittests, extra, tools}.
function parseOptions(opts) {
  opts = parse_args(opts.split(/\s+/), {
    default: {build: "do", platform: "all", unittests: "none", tools: "none"},
    alias: {b: "build", p: "platform", u: "unittests", t: "tools", e: "extra-builds"},
    string: ["build", "platform", "unittests", "tools", "extra-builds"]
  });

  // Parse build types (d=debug, o=opt).
  let builds = intersect(opts.build.split(""), ["d", "o"]);

  // If the given value is nonsense default to debug and opt builds.
  if (builds.length == 0) {
    builds = ["d", "o"];
  }

  // Parse platforms.
  let allPlatforms = ["linux", "linux64", "linux64-asan", "win64", "arm",
                      "linux64-gyp", "linux64-gyp-asan", "linux64-fuzz"];
  let platforms = intersect(opts.platform.split(/\s*,\s*/), allPlatforms);

  // If the given value is nonsense or "none" default to all platforms.
  if (platforms.length == 0 && opts.platform != "none") {
    platforms = allPlatforms;
  }

  // Parse unit tests ("gtests" is accepted as an alias for "gtest").
  let aliases = {"gtests": "gtest"};
  let allUnitTests = ["bogo", "crmf", "chains", "cipher", "db", "ec", "fips",
                      "gtest", "lowhash", "merge", "sdr", "smime", "tools",
                      "ssl", "mpi", "scert", "spki"];
  let unittests = intersect(opts.unittests.split(/\s*,\s*/).map(t => {
    return aliases[t] || t;
  }), allUnitTests);

  // If the given value is "all" run all tests. Nonsense values already
  // yield an empty intersection, so nothing runs for them. (A dead
  // "else if" branch that re-assigned [] to an empty array was removed.)
  if (opts.unittests == "all") {
    unittests = allUnitTests;
  }

  // Parse tools; "all" runs every tool, nonsense runs none (same dead
  // branch removed here as well).
  let allTools = ["clang-format", "scan-build"];
  let tools = intersect(opts.tools.split(/\s*,\s*/), allTools);

  if (opts.tools == "all") {
    tools = allTools;
  }

  return {
    builds: builds,
    platforms: platforms,
    unittests: unittests,
    extra: (opts.e == "all"),
    tools: tools
  };
}
+
// Build a task predicate from parsed try syntax options. The returned
// function decides whether a given task should be scheduled.
function filter(opts) {
  return function (task) {
    // Filter tools. We can immediately return here as those
    // are not affected by platform or build type selectors.
    if (task.platform == "nss-tools") {
      return opts.tools.some(tool => {
        return task.symbol.toLowerCase().startsWith(tool);
      });
    }

    // Filter unit tests.
    if (task.tests) {
      let found = opts.unittests.some(test => {
        // TODO: think of something more intelligent here.
        if (task.symbol.toLowerCase().startsWith("mpi") && test == "mpi") {
          return true;
        }
        return (task.group || task.symbol).toLowerCase().startsWith(test);
      });

      if (!found) {
        return false;
      }
    }

    // Filter extra builds.
    if (task.group == "Builds" && !opts.extra) {
      return false;
    }

    // True when the task's collection (default "opt") matches |name|.
    let coll = name => name == (task.collection || "opt");

    // Filter by platform.
    let found = opts.platforms.some(platform => {
      let aliases = {
        "linux": "linux32",
        "linux64-asan": "linux64",
        "linux64-fuzz": "linux64",
        "linux64-gyp": "linux64",
        "linux64-gyp-asan": "linux64",
        "win64": "windows2012-64",
        "arm": "linux32"
      };

      // Check the platform name.
      let keep = (task.platform == (aliases[platform] || platform));

      // Additional checks. (NOTE: `&=` coerces |keep| to 0/1, but
      // .some() only cares about truthiness, so the result is the same.)
      if (platform == "linux64-asan") {
        keep &= coll("asan");
      } else if (platform == "arm") {
        keep &= coll("arm-opt") || coll("arm-debug");
      } else if (platform == "linux64-gyp") {
        keep &= coll("gyp");
      } else if (platform == "linux64-gyp-asan") {
        keep &= coll("gyp-asan");
      } else if (platform == "linux64-fuzz") {
        keep &= coll("fuzz");
      } else {
        keep &= coll("opt") || coll("debug");
      }

      return keep;
    });

    if (!found) {
      return false;
    }

    // Finally, filter by build type.
    let isDebug = coll("debug") || coll("asan") || coll("arm-debug") ||
                  coll("gyp") || coll("fuzz");
    return (isDebug && opts.builds.includes("d")) ||
           (!isDebug && opts.builds.includes("o"));
  }
}
+
// Install a task filter derived from try syntax in the changeset
// comment (e.g. "try: -b do -p all -u none -t none"). Does nothing
// when no try syntax is present.
export function initFilter() {
  // Check for try syntax in changeset comment.
  let match = (process.env.TC_COMMENT || "").match(/^\s*try:\s*(.*)\s*$/);

  // Add try syntax filter.
  if (match) {
    queue.filter(filter(parseOptions(match[1])));
  }
}