| field | value | date |
|---|---|---|
| author | wolfbeast <mcwerewolf@wolfbeast.com> | 2020-01-02 21:06:40 +0100 |
| committer | wolfbeast <mcwerewolf@wolfbeast.com> | 2020-01-02 21:06:40 +0100 |
| commit | f4a12fc67689a830e9da1c87fd11afe5bc09deb3 (patch) | |
| tree | 211ae0cd022a6c11b0026ecc7761a550c584583c /security/nss/mach | |
| parent | f7d30133221896638f7bf4f66c504255c4b14f48 (diff) | |
Issue #1338 - Part 2: Update NSS to 3.48-RTM
Diffstat (limited to 'security/nss/mach')

| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | security/nss/mach | 352 |

1 file changed, 340 insertions(+), 12 deletions(-)
```diff
diff --git a/security/nss/mach b/security/nss/mach
index 178cfeb74..ec344c1ec 100644
--- a/security/nss/mach
+++ b/security/nss/mach
@@ -11,12 +11,16 @@
 import sys
 import argparse
 import fnmatch
+import io
 import subprocess
 import os
 import platform
+import shutil
+import tarfile
 import tempfile
 
 from hashlib import sha256
+from gtests.common.wycheproof.genTestVectors import generate_test_vectors
 
 DEVNULL = open(os.devnull, 'wb')
 cwd = os.path.dirname(os.path.abspath(__file__))
@@ -37,19 +41,286 @@ def run_tests(test, cycles="standard", env={}, silent=False):
     stdout = stderr = DEVNULL if silent else None
     subprocess.check_call(command, env=os_env, stdout=stdout, stderr=stderr)
 
+
+class coverityAction(argparse.Action):
+
+    def get_coverity_remote_cfg(self):
+        secret_name = 'project/relman/coverity-nss'
+        secrets_url = 'http://taskcluster/secrets/v1/secret/{}'.format(secret_name)
+
+        print('Using symbol upload token from the secrets service: "{}"'.
+              format(secrets_url))
+
+        import requests
+        res = requests.get(secrets_url)
+        res.raise_for_status()
+        secret = res.json()
+        cov_config = secret['secret'] if 'secret' in secret else None
+
+        if cov_config is None:
+            print('Ill formatted secret for Coverity. Aborting analysis.')
+            return None
+
+        return cov_config
+
+    def get_coverity_local_cfg(self, path):
+        try:
+            import yaml
+            file_handler = open(path)
+            config = yaml.safe_load(file_handler)
+        except Exception:
+            print('Unable to load coverity config from {}'.format(path))
+            return None
+        return config
+
+    def get_cov_config(self, path):
+        cov_config = None
+        if self.local_config:
+            cov_config = self.get_coverity_local_cfg(path)
+        else:
+            cov_config = self.get_coverity_remote_cfg()
+
+        if cov_config is None:
+            print('Unable to load Coverity config.')
+            return 1
+
+        self.cov_analysis_url = cov_config.get('package_url')
+        self.cov_package_name = cov_config.get('package_name')
+        self.cov_url = cov_config.get('server_url')
+        self.cov_port = cov_config.get('server_port')
+        self.cov_auth = cov_config.get('auth_key')
+        self.cov_package_ver = cov_config.get('package_ver')
+        self.cov_full_stack = cov_config.get('full_stack', False)
+
+        return 0
+
+    def download_coverity(self):
+        if self.cov_url is None or self.cov_port is None or self.cov_analysis_url is None or self.cov_auth is None:
+            print('Missing Coverity config options!')
+            return 1
+
+        COVERITY_CONFIG = '''
+        {
+          "type": "Coverity configuration",
+          "format_version": 1,
+          "settings": {
+            "server": {
+              "host": "%s",
+              "port": %s,
+              "ssl" : true,
+              "on_new_cert" : "trust",
+              "auth_key_file": "%s"
+            },
+            "stream": "NSS",
+            "cov_run_desktop": {
+              "build_cmd": ["%s"],
+              "clean_cmd": ["%s", "-cc"],
+            }
+          }
+        }
+        '''
+        # Generate the coverity.conf and auth files
+        build_cmd = os.path.join(cwd, 'build.sh')
+        cov_auth_path = os.path.join(self.cov_state_path, 'auth')
+        cov_setup_path = os.path.join(self.cov_state_path, 'coverity.conf')
+        cov_conf = COVERITY_CONFIG % (self.cov_url, self.cov_port, cov_auth_path, build_cmd, build_cmd)
+
+        def download(artifact_url, target):
+            import requests
+            resp = requests.get(artifact_url, verify=False, stream=True)
+            resp.raise_for_status()
+
+            # Extract archive into destination
+            with tarfile.open(fileobj=io.BytesIO(resp.content)) as tar:
+                tar.extractall(target)
+
+        download(self.cov_analysis_url, self.cov_state_path)
+
+        with open(cov_auth_path, 'w') as f:
+            f.write(self.cov_auth)
+
+        # Modify it's permission to 600
+        os.chmod(cov_auth_path, 0o600)
+
+        with open(cov_setup_path, 'a') as f:
+            f.write(cov_conf)
+
+    def setup_coverity(self, config_path, storage_path=None, force_download=True):
+        rc = self.get_cov_config(config_path)
+
+        if rc != 0:
+            return rc
+
+        if storage_path is None:
+            # If storage_path is None we set the context of the coverity into the cwd.
+            storage_path = cwd
+
+        self.cov_state_path = os.path.join(storage_path, "coverity")
+
+        if force_download is True or not os.path.exists(self.cov_state_path):
+            shutil.rmtree(self.cov_state_path, ignore_errors=True)
+            os.mkdir(self.cov_state_path)
+
+            # Download everything that we need for Coverity from out private instance
+            self.download_coverity()
+
+        self.cov_path = os.path.join(self.cov_state_path, self.cov_package_name)
+        self.cov_run_desktop = os.path.join(self.cov_path, 'bin', 'cov-run-desktop')
+        self.cov_translate = os.path.join(self.cov_path, 'bin', 'cov-translate')
+        self.cov_configure = os.path.join(self.cov_path, 'bin', 'cov-configure')
+        self.cov_work_path = os.path.join(self.cov_state_path, 'data-coverity')
+        self.cov_idir_path = os.path.join(self.cov_work_path, self.cov_package_ver, 'idir')
+
+        if not os.path.exists(self.cov_path) or \
+           not os.path.exists(self.cov_run_desktop) or \
+           not os.path.exists(self.cov_translate) or \
+           not os.path.exists(self.cov_configure):
+            print('Missing Coverity in {}'.format(self.cov_path))
+            return 1
+
+        return 0
+
+    def run_process(self, args, cwd=cwd):
+        proc = subprocess.Popen(args, cwd=cwd)
+        status = None
+        while status is None:
+            try:
+                status = proc.wait()
+            except KeyboardInterrupt:
+                pass
+        return status
+
+    def cov_is_file_in_source(self, abs_path):
+        if os.path.islink(abs_path):
+            abs_path = os.path.realpath(abs_path)
+        return abs_path
+
+    def dump_cov_artifact(self, cov_results, source, output):
+        import json
+
+        def relpath(path):
+            '''Build path relative to repository root'''
+            if path.startswith(cwd):
+                return os.path.relpath(path, cwd)
+            return path
+
+        # Parse Coverity json into structured issues
+        with open(cov_results) as f:
+            result = json.load(f)
+
+        # Parse the issues to a standard json format
+        issues_dict = {'files': {}}
+
+        files_list = issues_dict['files']
+
+        def build_element(issue):
+            # We look only for main event
+            event_path = next((event for event in issue['events'] if event['main'] is True), None)
+
+            dict_issue = {
+                'line': issue['mainEventLineNumber'],
+                'flag': issue['checkerName'],
+                'message': event_path['eventDescription'],
+                'extra': {
+                    'category': issue['checkerProperties']['category'],
+                    'stateOnServer': issue['stateOnServer'],
+                    'stack': []
+                }
+            }
+
+            # Embed all events into extra message
+            for event in issue['events']:
+                dict_issue['extra']['stack'].append({'file_path': relpath(event['strippedFilePathname']),
+                                                     'line_number': event['lineNumber'],
+                                                     'path_type': event['eventTag'],
+                                                     'description': event['eventDescription']})
+
+            return dict_issue
+
+        for issue in result['issues']:
+            path = self.cov_is_file_in_source(issue['strippedMainEventFilePathname'])
+            if path is None:
+                # Since we skip a result we should log it
+                print('Skipping CID: {0} from file: {1} since it\'s not related with the current patch.'.format(
+                    issue['stateOnServer']['cid'], issue['strippedMainEventFilePathname']))
+                continue
+            path = relpath(path)
+            if path in files_list:
+                files_list[path]['warnings'].append(build_element(issue))
+            else:
+                files_list[path] = {'warnings': [build_element(issue)]}
+
+        with open(output, 'w') as f:
+            json.dump(issues_dict, f)
+
+    def mutate_paths(self, paths):
+        for index in xrange(len(paths)):
+            paths[index] = os.path.abspath(paths[index])
+
+    def __call__(self, parser, args, paths, option_string=None):
+        self.local_config = True
+        config_path = args.config
+        storage_path = args.storage
+
+        have_paths = True
+        if len(paths) == 0:
+            have_paths = False
+            print('No files have been specified for analysis, running Coverity on the entire project.')
+
+        self.mutate_paths(paths)
+
+        if config_path is None:
+            self.local_config = False
+            print('No coverity config path has been specified, so running in automation.')
+            if 'NSS_AUTOMATION' not in os.environ:
+                print('Coverity based static-analysis cannot be ran outside automation.')
+                return 1
+
+        rc = self.setup_coverity(config_path, storage_path, args.force)
+        if rc != 0:
+            return 1
+
+        # First run cov-run-desktop --setup in order to setup the analysis env
+        cmd = [self.cov_run_desktop, '--setup']
+        print('Running {} --setup'.format(self.cov_run_desktop))
+
+        rc = self.run_process(args=cmd, cwd=self.cov_path)
+
+        if rc != 0:
+            print('Running {} --setup failed!'.format(self.cov_run_desktop))
+            return rc
+
+        cov_result = os.path.join(self.cov_state_path, 'cov-results.json')
+
+        # Once the capture is performed we need to do the actual Coverity Desktop analysis
+        if have_paths:
+            cmd = [self.cov_run_desktop, '--json-output-v6', cov_result] + paths
+        else:
+            cmd = [self.cov_run_desktop, '--json-output-v6', cov_result, '--analyze-captured-source']
+
+        print('Running Coverity Analysis for {}'.format(cmd))
+
+        rc = self.run_process(cmd, cwd=self.cov_state_path)
+
+        if rc != 0:
+            print('Coverity Analysis failed!')
+
+        # On automation, like try, we want to build an artifact with the results.
+        if 'NSS_AUTOMATION' in os.environ:
+            self.dump_cov_artifact(cov_result, cov_result, "/home/worker/nss/coverity/coverity.json")
+
+
 class cfAction(argparse.Action):
-    docker_command = ["docker"]
+    docker_command = None
     restorecon = None
 
     def __call__(self, parser, args, values, option_string=None):
-        if not args.noroot:
-            self.setDockerCommand()
+        self.setDockerCommand(args)
 
         if values:
             files = [os.path.relpath(os.path.abspath(x), start=cwd) for x in values]
         else:
             files = self.modifiedFiles()
-        files = [os.path.join('/home/worker/nss', x) for x in files]
 
         # First check if we can run docker.
         try:
@@ -57,9 +328,16 @@ class cfAction(argparse.Action):
                 subprocess.check_call(
                     self.docker_command + ["images"], stdout=f)
         except:
-            print("Please install docker and start the docker daemon.")
-            sys.exit(1)
+            self.docker_command = None
+
+        if self.docker_command is None:
+            print("warning: running clang-format directly, which isn't guaranteed to be correct")
+            command = [cwd + "/automation/clang-format/run_clang_format.sh"] + files
+            repr(command)
+            subprocess.call(command)
+            return
 
+        files = [os.path.join('/home/worker/nss', x) for x in files]
         docker_image = 'clang-format-service:latest'
         cf_docker_folder = cwd + "/automation/clang-format"
 
@@ -112,11 +390,17 @@ class cfAction(argparse.Action):
         subprocess.check_call(command)
         return
 
-    def setDockerCommand(self):
+    def setDockerCommand(self, args):
+        from distutils.spawn import find_executable
         if platform.system() == "Linux":
-            from distutils.spawn import find_executable
-            self.restorecon = find_executable('restorecon')
-            self.docker_command = ["sudo"] + self.docker_command
+            self.restorecon = find_executable("restorecon")
+        dcmd = find_executable("docker")
+        if dcmd is not None:
+            self.docker_command = [dcmd]
+            if not args.noroot:
+                self.docker_command = ["sudo"] + self.docker_command
+        else:
+            self.docker_command = None
 
     def modifiedFiles(self):
         files = []
@@ -212,6 +496,13 @@ class commandsAction(argparse.Action):
         for c in commandsAction.commands:
             print(c)
 
+class wycheproofAction(argparse.Action):
+
+    def __call__(self, parser, args, values, option_string=None):
+        generate_test_vectors()
+        clangFormat = cfAction(None, None, None)
+        clangFormat(None, args, None)
+
 
 def parse_arguments():
     parser = argparse.ArgumentParser(
@@ -242,6 +533,31 @@ def parse_arguments():
         help="Specify files or directories to run clang-format on",
         action=cfAction)
 
+    parser_sa = subparsers.add_parser(
+        'static-analysis',
+        help="""
+        Run static-analysis tools based on coverity.
+
+        By default this runs only on automation and provides a list of issues that
+        are only present locally.
+        """)
+    parser_sa.add_argument(
+        '--config', help='Path to Coverity config file. Only used for local runs.',
+        default=None)
+    parser_sa.add_argument(
+        '--storage', help="""
+        Path where to store Coverity binaries and results. If none, the base repository will be used.
+        """,
+        default=None)
+    parser_sa.add_argument(
+        '--force', help='Force the re-download of the coverity artefact.',
+        action='store_true')
+    parser_sa.add_argument(
+        '<file>',
+        nargs='*',
+        help="Specify files to run Coverity on. If no files are specified the analysis will check the entire project.",
+        action=coverityAction)
+
     parser_test = subparsers.add_parser(
         'tests', help='Run tests through tests/all.sh.')
     tests = [
@@ -263,13 +579,25 @@ def parse_arguments():
         action=covAction)
 
     parser_commands = subparsers.add_parser(
-        'mach-commands',
+        'mach-completion',
         help="list commands")
     parser_commands.add_argument(
-        'mach-commands',
+        'mach-completion',
        nargs='*',
         action=commandsAction)
 
+    parser_wycheproof = subparsers.add_parser(
+        'wycheproof',
+        help="generate wycheproof test headers")
+    parser_wycheproof.add_argument(
+        '--noroot',
+        help='On linux, suppress the use of \'sudo\' for running docker.',
+        action='store_true')
+    parser_wycheproof.add_argument(
+        'wycheproof',
+        nargs='*',
+        action=wycheproofAction)
+
     commandsAction.commands = [c for c in subparsers.choices]
     return parser.parse_args()
 
```
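For readers unfamiliar with how this script wires subcommands: the new `static-analysis` and `wycheproof` commands reuse the pattern already used by `clang-format`, where the positional argument's `action` class does the actual work as a side effect of `parse_args()`. Below is a minimal, self-contained sketch of that pattern; the `demo` subcommand and `demoAction` class are illustrative names, not part of the patch.

```python
import argparse


class demoAction(argparse.Action):
    # argparse invokes __call__ while parse_args() is still running, so the
    # subcommand's work happens as a side effect of parsing -- the same
    # mechanism coverityAction, cfAction and wycheproofAction rely on.
    def __call__(self, parser, args, values, option_string=None):
        targets = values if values else ['<entire project>']
        print('would analyze: {}'.format(targets))


def parse_arguments():
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    parser_demo = subparsers.add_parser('demo', help='illustrative subcommand')
    # nargs='*' makes the action fire even when no paths are given, mirroring
    # the "run on the entire project" behaviour of the static-analysis command.
    parser_demo.add_argument('demo', nargs='*', action=demoAction)
    return parser.parse_args()


if __name__ == '__main__':
    parse_arguments()  # e.g. `python demo.py demo lib/freebl` prints: would analyze: ['lib/freebl']
```

Two consequences of this design are worth noting: argparse ignores the value returned by an action's `__call__`, so the `return 1` statements in `coverityAction.__call__` stop the analysis early but do not set the process exit status; and because the action classes do their work in `__call__`, `wycheproofAction` can simply construct `cfAction(None, None, None)` and invoke it like an ordinary callable to clang-format the generated test headers.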