Diffstat (limited to 'security/nss/automation/taskcluster/graph/src')
5 files changed, 326 insertions(+), 142 deletions(-)
diff --git a/security/nss/automation/taskcluster/graph/src/extend.js b/security/nss/automation/taskcluster/graph/src/extend.js
index 1302602bc..2a1a13835 100644
--- a/security/nss/automation/taskcluster/graph/src/extend.js
+++ b/security/nss/automation/taskcluster/graph/src/extend.js
@@ -103,15 +103,6 @@ queue.filter(task => {
     return false;
   }
 
-  if (task.group == "Test") {
-    // Don't run test builds on old make platforms, and not for fips gyp.
-    // Disable on aarch64, see bug 1488331.
-    if (task.collection == "make" || task.collection == "fips"
-        || task.platform == "aarch64") {
-      return false;
-    }
-  }
-
   // Don't run all additional hardware tests on ARM.
   if (task.group == "Cipher" && task.platform == "aarch64" && task.env &&
       (task.env.NSS_DISABLE_PCLMUL == "1" || task.env.NSS_DISABLE_HW_AES == "1"
@@ -130,19 +121,37 @@ queue.map(task => {
     }
   }
 
-  // We don't run FIPS SSL tests
   if (task.tests == "ssl") {
     if (!task.env) {
       task.env = {};
     }
-    task.env.NSS_SSL_TESTS = "crl iopr policy";
+
+    // Stress tests to not include other SSL tests
+    if (task.symbol == "stress") {
+      task.env.NSS_SSL_TESTS = "normal_normal";
+    } else {
+      task.env.NSS_SSL_TESTS = "crl iopr policy normal_normal";
+    }
+
+    // FIPS runs
+    if (task.collection == "fips") {
+      task.env.NSS_SSL_TESTS += " fips_fips fips_normal normal_fips";
+    }
+
+    if (task.platform == "mac") {
+      task.maxRunTime = 7200;
+    }
   }
 
   // Windows is slow.
-  if (task.platform == "windows2012-64" && task.tests == "chains") {
+  if ((task.platform == "windows2012-32" || task.platform == "windows2012-64") &&
+      task.tests == "chains") {
     task.maxRunTime = 7200;
   }
 
+  if (task.platform == "mac" && task.tests == "tools") {
+    task.maxRunTime = 7200;
+  }
 
   return task;
 });
@@ -304,6 +313,10 @@ export default async function main() {
 
   await scheduleMac("Mac (opt)", {collection: "opt"}, "--opt");
   await scheduleMac("Mac (debug)", {collection: "debug"});
+
+  // Must be executed after all other tasks are scheduled
+  queue.clearFilters();
+  await scheduleCodeReview();
 }
@@ -321,12 +334,7 @@ async function scheduleMac(name, base, args = "") {
   });
 
   // Build base definition.
-  let build_base = merge(mac_base, {
-    command: [
-      MAC_CHECKOUT_CMD,
-      ["bash", "-c",
-       "nss/automation/taskcluster/scripts/build_gyp.sh", args]
-    ],
+  let build_base_without_command_symbol = merge(mac_base, {
     provisioner: "localprovisioner",
     workerType: "nss-macos-10-12",
     platform: "mac",
@@ -337,6 +345,34 @@ async function scheduleMac(name, base, args = "") {
       path: "public"
     }],
     kind: "build",
+  });
+
+  let gyp_cmd = "nss/automation/taskcluster/scripts/build_gyp.sh ";
+
+  if (!("collection" in base) ||
+      (base.collection != "make" &&
+       base.collection != "asan" &&
+       base.collection != "fips" &&
+       base.collection != "fuzz")) {
+    let nspr_gyp = gyp_cmd + "--nspr-only --nspr-test-build --nspr-test-run ";
+    let nspr_build = merge(build_base_without_command_symbol, {
+      command: [
+        MAC_CHECKOUT_CMD,
+        ["bash", "-c",
+         nspr_gyp + args]
+      ],
+      symbol: "NSPR"
+    });
+    // The task that tests NSPR.
+    let nspr_task_build = queue.scheduleTask(merge(nspr_build, {name}));
+  }
+
+  let build_base = merge(build_base_without_command_symbol, {
+    command: [
+      MAC_CHECKOUT_CMD,
+      ["bash", "-c",
+       gyp_cmd + args]
+    ],
     symbol: "B"
   });
@@ -370,25 +406,54 @@ async function scheduleMac(name, base, args = "") {
 
 /*****************************************************************************/
 
 async function scheduleLinux(name, overrides, args = "") {
+  let checkout_and_gyp = "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh ";
+  let artifacts_and_kind = {
+    artifacts: {
+      public: {
+        expires: 24 * 7,
+        type: "directory",
+        path: "/home/worker/artifacts"
+      }
+    },
+    kind: "build",
+  };
+
+  if (!("collection" in overrides) ||
+      (overrides.collection != "make" &&
+       overrides.collection != "asan" &&
+       overrides.collection != "fips" &&
+       overrides.collection != "fuzz")) {
+    let nspr_gyp = checkout_and_gyp + "--nspr-only --nspr-test-build --nspr-test-run ";
+
+    let nspr_base = merge({
+      command: [
+        "/bin/bash",
+        "-c",
+        nspr_gyp + args
+      ],
+    }, overrides);
+    let nspr_without_symbol = merge(nspr_base, artifacts_and_kind);
+    let nspr_build = merge(nspr_without_symbol, {
+      symbol: "NSPR",
+    });
+    // The task that tests NSPR.
+    let nspr_task_build = queue.scheduleTask(merge(nspr_build, {name}));
+  }
+
   // Construct a base definition. This takes |overrides| second because
   // callers expect to be able to overwrite the |command| key.
   let base = merge({
     command: [
       "/bin/bash",
       "-c",
-      "bin/checkout.sh && nss/automation/taskcluster/scripts/build_gyp.sh " + args
+      checkout_and_gyp + args
     ],
   }, overrides);
+
+  let base_without_symbol = merge(base, artifacts_and_kind);
+
   // The base for building.
-  let build_base = merge(base, {
-    artifacts: {
-      public: {
-        expires: 24 * 7,
-        type: "directory",
-        path: "/home/worker/artifacts"
-      }
-    },
-    kind: "build",
+  let build_base = merge(base_without_symbol, {
     symbol: "B",
   });
@@ -468,7 +533,6 @@ async function scheduleLinux(name, overrides, args = "") {
     },
     symbol: "clang-4"
   }));
-
   queue.scheduleTask(merge(extra_base, {
     name: `${name} w/ gcc-4.4`,
     image: LINUX_GCC44_IMAGE,
@@ -528,8 +592,6 @@ async function scheduleLinux(name, overrides, args = "") {
     symbol: "modular"
   }));
 
-  await scheduleTestBuilds(name + " Test", merge(base, {group: "Test"}), args);
-
   return queue.submit();
 }
@@ -574,7 +636,7 @@ async function scheduleFuzzing() {
       "/bin/bash",
       "-c",
       "bin/checkout.sh && " +
-      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --fuzz"
+      "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz"
     ],
     artifacts: {
       public: {
@@ -601,7 +663,7 @@ async function scheduleFuzzing() {
       "/bin/bash",
       "-c",
       "bin/checkout.sh && " +
-      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --fuzz=tls"
+      "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz=tls"
     ],
   }));
 
@@ -679,7 +741,7 @@ async function scheduleFuzzing32() {
       "/bin/bash",
       "-c",
       "bin/checkout.sh && " +
-      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --fuzz -t ia32"
+      "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz -t ia32"
     ],
     artifacts: {
      public: {
@@ -706,7 +768,7 @@ async function scheduleFuzzing32() {
       "/bin/bash",
       "-c",
       "bin/checkout.sh && " +
-      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --fuzz=tls -t ia32"
+      "nss/automation/taskcluster/scripts/build_gyp.sh --fuzz=tls -t ia32"
     ],
   }));
 
@@ -763,74 +825,9 @@ async function scheduleFuzzing32() {
 
 /*****************************************************************************/
 
-async function scheduleTestBuilds(name, base, args = "") {
-  // Build base definition.
-  let build = merge(base, {
-    command: [
-      "/bin/bash",
-      "-c",
-      "bin/checkout.sh && " +
-      "nss/automation/taskcluster/scripts/build_gyp.sh -g -v --test --ct-verif " + args
-    ],
-    artifacts: {
-      public: {
-        expires: 24 * 7,
-        type: "directory",
-        path: "/home/worker/artifacts"
-      }
-    },
-    kind: "build",
-    symbol: "B",
-    name: `${name} build`,
-  });
-
-  // On linux we have a specialized build image for building.
-  if (build.platform === "linux32" || build.platform === "linux64") {
-    build = merge(build, {
-      image: LINUX_BUILDS_IMAGE,
-    });
-  }
-
-  // The task that builds NSPR+NSS.
-  let task_build = queue.scheduleTask(build);
-
-  // Schedule tests.
-  queue.scheduleTask(merge(base, {
-    parent: task_build,
-    name: `${name} mpi tests`,
-    command: [
-      "/bin/bash",
-      "-c",
-      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
-    ],
-    tests: "mpi",
-    cycle: "standard",
-    symbol: "mpi",
-    kind: "test"
-  }));
-  queue.scheduleTask(merge(base, {
-    parent: task_build,
-    command: [
-      "/bin/bash",
-      "-c",
-      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_tests.sh"
-    ],
-    name: `${name} gtests`,
-    symbol: "Gtest",
-    tests: "gtests",
-    cycle: "standard",
-    kind: "test"
-  }));
-
-  return queue.submit();
-}
-
-
-/*****************************************************************************/
-
 async function scheduleWindows(name, base, build_script) {
   base = merge(base, {
-    workerType: "nss-win2012r2",
+    workerType: "win2012r2",
     env: {
       PATH: "c:\\mozilla-build\\bin;c:\\mozilla-build\\python;" +
             "c:\\mozilla-build\\msys\\local\\bin;c:\\mozilla-build\\7zip;" +
@@ -840,24 +837,49 @@ async function scheduleWindows(name, base, build_script) {
             "c:\\mozilla-build\\moztools-x64\\bin;c:\\mozilla-build\\wget",
       DOMSUF: "localdomain",
       HOST: "localhost",
-    }
+    },
+    features: ["taskclusterProxy"],
+    scopes: ["project:releng:services/tooltool/api/download/internal"],
   });
 
-  // Build base definition.
-  let build_base = merge(base, {
-    command: [
-      WINDOWS_CHECKOUT_CMD,
-      `bash -c 'nss/automation/taskcluster/windows/${build_script}'`
-    ],
+  let artifacts_and_kind = {
     artifacts: [{
       expires: 24 * 7,
       type: "directory",
       path: "public\\build"
     }],
     kind: "build",
+  };
+
+  let build_without_command_symbol = merge(base, artifacts_and_kind);
+
+  // Build base definition.
+  let build_base = merge(build_without_command_symbol, {
+    command: [
+      WINDOWS_CHECKOUT_CMD,
+      `bash -c 'nss/automation/taskcluster/windows/${build_script}'`
+    ],
     symbol: "B"
   });
 
+  if (!("collection" in base) ||
+      (base.collection != "make" &&
+       base.collection != "asan" &&
+       base.collection != "fips" &&
+       base.collection != "fuzz")) {
+    let nspr_gyp =
+      `bash -c 'nss/automation/taskcluster/windows/${build_script} --nspr-only --nspr-test-build --nspr-test-run'`;
+    let nspr_build = merge(build_without_command_symbol, {
+      command: [
+        WINDOWS_CHECKOUT_CMD,
+        nspr_gyp
+      ],
+      symbol: "NSPR"
+    });
+    // The task that tests NSPR.
+    let task_build = queue.scheduleTask(merge(nspr_build, {name}));
+  }
+
   // Make builds run FIPS tests, which need an extra FIPS build.
   if (base.collection == "make") {
     let extra_build = queue.scheduleTask(merge(build_base, {
@@ -924,9 +946,13 @@ async function scheduleWindows(name, base, build_script) {
 
 function scheduleTests(task_build, task_cert, test_base) {
   test_base = merge(test_base, {kind: "test"});
-
-  // Schedule tests that do NOT need certificates.
   let no_cert_base = merge(test_base, {parent: task_build});
+  let cert_base = merge(test_base, {parent: task_cert});
+  let cert_base_long = merge(cert_base, {maxRunTime: 7200});
+
+  // Schedule tests that do NOT need certificates. This is defined as
+  // the test itself not needing certs AND not running under the upgradedb
+  // cycle (which itself needs certs). If cycle is not defined, default is all.
   queue.scheduleTask(merge(no_cert_base, {
     name: "Gtests", symbol: "Gtest", tests: "ssl_gtests gtests", cycle: "standard"
   }));
@@ -948,45 +974,47 @@ function scheduleTests(task_build, task_cert, test_base) {
     name: "tlsfuzzer tests", symbol: "tlsfuzzer", tests: "tlsfuzzer", cycle: "standard"
   }));
   queue.scheduleTask(merge(no_cert_base, {
+    name: "MPI tests", symbol: "MPI", tests: "mpi", cycle: "standard"
+  }));
+  queue.scheduleTask(merge(cert_base, {
     name: "Chains tests", symbol: "Chains", tests: "chains"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base_long, {
     name: "Cipher tests", symbol: "Default", tests: "cipher", group: "Cipher"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base_long, {
     name: "Cipher tests", symbol: "NoAESNI", tests: "cipher",
     env: {NSS_DISABLE_HW_AES: "1"}, group: "Cipher"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base_long, {
     name: "Cipher tests", symbol: "NoPCLMUL", tests: "cipher",
     env: {NSS_DISABLE_PCLMUL: "1"}, group: "Cipher"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base_long, {
     name: "Cipher tests", symbol: "NoAVX", tests: "cipher",
     env: {NSS_DISABLE_AVX: "1"}, group: "Cipher"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base_long, {
     name: "Cipher tests", symbol: "NoSSSE3|NEON", tests: "cipher",
     env: {
       NSS_DISABLE_ARM_NEON: "1",
       NSS_DISABLE_SSSE3: "1"
    }, group: "Cipher"
  }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base, {
     name: "EC tests", symbol: "EC", tests: "ec"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base, {
     name: "Lowhash tests", symbol: "Lowhash", tests: "lowhash"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base, {
     name: "SDR tests", symbol: "SDR", tests: "sdr"
   }));
-  queue.scheduleTask(merge(no_cert_base, {
+  queue.scheduleTask(merge(cert_base, {
     name: "Policy tests", symbol: "Policy", tests: "policy"
   }));
 
   // Schedule tests that need certificates.
-  let cert_base = merge(test_base, {parent: task_cert});
   queue.scheduleTask(merge(cert_base, {
     name: "CRMF tests", symbol: "CRMF", tests: "crmf"
   }));
@@ -1017,6 +1045,10 @@ function scheduleTests(task_build, task_cert, test_base) {
   queue.scheduleTask(merge(ssl_base, {
     name: "SSL tests (upgradedb)", symbol: "upgradedb", cycle: "upgradedb"
   }));
+  queue.scheduleTask(merge(ssl_base, {
+    name: "SSL tests (stress)", symbol: "stress", cycle: "sharedb",
+    env: {NSS_SSL_RUN: "stress"}
+  }));
 }
 
 /*****************************************************************************/
@@ -1074,6 +1106,33 @@ async function scheduleTools() {
   }));
 
   queue.scheduleTask(merge(base, {
+    symbol: "coverity",
+    name: "coverity",
+    image: FUZZ_IMAGE,
+    tags: ['code-review'],
+    env: {
+      USE_64: "1",
+      CC: "clang",
+      CCC: "clang++",
+      NSS_AUTOMATION: "1"
+    },
+    features: ["taskclusterProxy"],
+    scopes: ["secrets:get:project/relman/coverity-nss"],
+    artifacts: {
+      "public/code-review/coverity.json": {
+        expires: 24 * 7,
+        type: "file",
+        path: "/home/worker/nss/coverity/coverity.json"
+      }
+    },
+    command: [
+      "/bin/bash",
+      "-c",
+      "bin/checkout.sh && nss/automation/taskcluster/scripts/run_coverity.sh"
+    ]
+  }));
+
+  queue.scheduleTask(merge(base, {
     symbol: "hacl",
     name: "hacl",
     image: HACL_GEN_IMAGE,
@@ -1162,3 +1221,38 @@ async function scheduleTools() {
 
   return queue.submit();
 }
+
+async function scheduleCodeReview() {
+  let tasks = queue.taggedTasks("code-review");
+  if(! tasks) {
+    console.debug("No code review tasks, skipping ending task");
+    return
+  }
+
+  // From https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/ci/code-review/kind.yml
+  queue.scheduleTask({
+    platform: "nss-tools",
+    name: "code-review-issues",
+    description: "List all issues found in static analysis and linting tasks",
+
+    // No logic on that task
+    image: LINUX_IMAGE,
+    command: ["/bin/true"],
+
+    // This task must run after all analyzer tasks are completed
+    parents: tasks,
+
+    // This option permits to run the task
+    // regardless of the analyzers tasks exit status
+    // as we are interested in the task failures
+    requires: "all-resolved",
+
+    // Publish code review trigger on pulse
+    routes: ["project.relman.codereview.v1.try_ending"],
+
+    kind: "code-review",
+    symbol: "E"
+  });
+
+  return queue.submit();
+};
diff --git a/security/nss/automation/taskcluster/graph/src/image_builder.js b/security/nss/automation/taskcluster/graph/src/image_builder.js
index d9d7755dc..b69b31602 100644
--- a/security/nss/automation/taskcluster/graph/src/image_builder.js
+++ b/security/nss/automation/taskcluster/graph/src/image_builder.js
@@ -7,13 +7,13 @@ import context_hash from "./context_hash";
 import taskcluster from "taskcluster-client";
 
 async function taskHasImageArtifact(taskId) {
-  let queue = new taskcluster.Queue();
+  let queue = new taskcluster.Queue(taskcluster.fromEnvVars());
   let {artifacts} = await queue.listLatestArtifacts(taskId);
   return artifacts.some(artifact => artifact.name == "public/image.tar");
 }
 
 async function findTaskWithImageArtifact(ns) {
-  let index = new taskcluster.Index();
+  let index = new taskcluster.Index(taskcluster.fromEnvVars());
   let {taskId} = await index.findTask(ns);
   let has_image = await taskHasImageArtifact(taskId);
   return has_image ? taskId : null;
diff --git a/security/nss/automation/taskcluster/graph/src/index.js b/security/nss/automation/taskcluster/graph/src/index.js
index 4153e1b18..2c7f5eb50 100644
--- a/security/nss/automation/taskcluster/graph/src/index.js
+++ b/security/nss/automation/taskcluster/graph/src/index.js
@@ -3,12 +3,26 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 import * as try_syntax from "./try_syntax";
+import * as queue from "./queue";
 import extend from "./extend";
 
-// Init try syntax filter.
-if (process.env.TC_PROJECT == "nss-try") {
-  try_syntax.initFilter();
-}
+const main = async () => {
+  // Init try syntax filter.
+  if (process.env.TC_PROJECT == "nss-try") {
+    await try_syntax.initFilter();
+  } else {
+    // Coverity should not be run on landings, only by request (typically
+    // by Phabricator).
+    queue.filter(task => {
+      return task.symbol != "coverity";
+    });
+  }
 
-// Extend the task graph.
-extend().catch(console.error);
+  // Extend the task graph.
+  await extend();
+};
+
+main().catch(err => {
+  console.error(err);
+  process.exit(1);
+});
diff --git a/security/nss/automation/taskcluster/graph/src/queue.js b/security/nss/automation/taskcluster/graph/src/queue.js
index 809a17bf1..fd5be2050 100644
--- a/security/nss/automation/taskcluster/graph/src/queue.js
+++ b/security/nss/automation/taskcluster/graph/src/queue.js
@@ -12,10 +12,12 @@ let maps = [];
 let filters = [];
 
 let tasks = new Map();
+let tags = new Map();
 let image_tasks = new Map();
+let parameters = {};
 
 let queue = new taskcluster.Queue({
-  baseUrl: "http://taskcluster/queue/v1"
+  rootUrl: process.env.TASKCLUSTER_PROXY_URL,
 });
 
 function fromNow(hours) {
@@ -94,13 +96,17 @@ function convertTask(def) {
 
   let env = merge({
     NSS_HEAD_REPOSITORY: process.env.NSS_HEAD_REPOSITORY,
-    NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION
+    NSS_HEAD_REVISION: process.env.NSS_HEAD_REVISION,
+    NSS_MAX_MP_PBE_ITERATION_COUNT: "100",
   }, def.env || {});
 
   if (def.parent) {
     dependencies.push(def.parent);
     env.TC_PARENT_TASK_ID = def.parent;
   }
+  if (def.parents) {
+    dependencies = dependencies.concat(def.parents);
+  }
 
   if (def.tests) {
     env.NSS_TESTS = def.tests;
@@ -109,6 +115,14 @@ function convertTask(def) {
   if (def.cycle) {
     env.NSS_CYCLES = def.cycle;
   }
+  if (def.kind === "build") {
+    // Disable leak checking during builds (bug 1579290).
+    if (env.ASAN_OPTIONS) {
+      env.ASAN_OPTIONS += ":detect_leaks=0";
+    } else {
+      env.ASAN_OPTIONS = "detect_leaks=0";
+    }
+  }
 
   let payload = {
     env,
@@ -132,16 +146,27 @@ function convertTask(def) {
     }
   }
 
+  if (def.scopes) {
+    // Need to add existing scopes in the task definition
+    scopes.push.apply(scopes, def.scopes)
+  }
+
+  let extra = Object.assign({
+    treeherder: parseTreeherder(def)
+  }, parameters);
+
   return {
-    provisionerId: def.provisioner || "aws-provisioner-v1",
-    workerType: def.workerType || "hg-worker",
-    schedulerId: "task-graph-scheduler",
+    provisionerId: def.provisioner || `nss-${process.env.MOZ_SCM_LEVEL}`,
+    workerType: def.workerType || "linux",
+    schedulerId: process.env.TC_SCHEDULER_ID,
+    taskGroupId: process.env.TASK_ID,
     scopes,
 
     created: fromNow(0),
     deadline: fromNow(24),
     dependencies,
+    requires: def.requires || "all-completed",
     routes: parseRoutes(def.routes || []),
 
     metadata: {
@@ -152,10 +177,7 @@ function convertTask(def) {
     },
 
     payload,
-
-    extra: {
-      treeherder: parseTreeherder(def)
-    }
+    extra,
   };
 }
@@ -167,6 +189,18 @@ export function filter(fun) {
   filters.push(fun);
 }
 
+export function addParameters(params) {
+  parameters = Object.assign(parameters, params);
+}
+
+export function clearFilters(fun) {
+  filters = [];
+}
+
+export function taggedTasks(tag) {
+  return tags[tag];
+}
+
 export function scheduleTask(def) {
   let taskId = slugid.v4();
   tasks.set(taskId, merge({}, def));
@@ -188,6 +222,16 @@ export async function submit() {
     let log_id = `${task.name} @ ${task.platform}[${task.collection || "opt"}]`;
     console.log(`+ Submitting ${log_id}.`);
 
+    // Index that task for each tag specified
+    if(task.tags) {
+      task.tags.map(tag => {
+        if(!tags[tag]) {
+          tags[tag] = [];
+        }
+        tags[tag].push(taskId);
+      });
+    }
+
     let parent = task.parent;
 
     // Convert the task definition.
diff --git a/security/nss/automation/taskcluster/graph/src/try_syntax.js b/security/nss/automation/taskcluster/graph/src/try_syntax.js
index f1772a658..ca0b84813 100644
--- a/security/nss/automation/taskcluster/graph/src/try_syntax.js
+++ b/security/nss/automation/taskcluster/graph/src/try_syntax.js
@@ -3,8 +3,14 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 import * as queue from "./queue";
+import path from 'path'
+import fs from 'fs'
 import intersect from "intersect";
 import parse_args from "minimist";
+import util from "util";
+import child_process from 'child_process';
+
+let execFile = util.promisify(child_process.execFile);
 
 function parseOptions(opts) {
   opts = parse_args(opts.split(/\s+/), {
@@ -51,7 +57,7 @@ function parseOptions(opts) {
   }
 
   // Parse tools.
-  let allTools = ["clang-format", "scan-build", "hacl", "saw", "abi", "coverage"];
+  let allTools = ["clang-format", "scan-build", "coverity", "hacl", "saw", "abi", "coverage"];
   let tools = intersect(opts.tools.split(/\s*,\s*/), allTools);
 
   // If the given value is "all" run all tools.
@@ -154,14 +160,40 @@ function filter(opts) {
   }
 }
 
-export function initFilter() {
-  let comment = process.env.TC_COMMENT || "";
+async function getCommitComment() {
+  const res = await execFile('hg', ['log', '-r', '.', '-T', '{desc}']);
+  return res.stdout;
+};
+
+export async function initFilter() {
+  let comment = await getCommitComment();
+
+  // Load try_task_config.json
+  // Add parameters to queue for created tasks
+  let config_path = path.normalize(path.join(__dirname, '../../../../try_task_config.json'))
+  if (fs.existsSync(config_path)) {
+    var payload = JSON.parse(fs.readFileSync(config_path));
+    if (payload['version'] == 2) {
+      queue.addParameters(payload['parameters']);
+    }
+  }
 
   // Check for try syntax in changeset comment.
-  let match = comment.match(/^\s*try:\s*(.*)\s*$/);
+  let match = comment.match(/\btry:\s*(.*)\s*$/m);
 
   // Add try syntax filter.
   if (match) {
-    queue.filter(filter(parseOptions(match[1])));
+    let match1 = match[1];
+    queue.filter(filter(parseOptions(match1)));
+
+    if (match1.includes("--nspr-patch")) {
+      queue.map(task => {
+        if (!task.env) {
+          task.env = {};
+        }
+        task.env.ALLOW_NSPR_PATCH = "1";
+        return task;
+      });
+    }
   }
 }