def vendor(self, packages=None, with_windows_wheel=False):
    """Vendor Python packages into third_party/python.

    Updates requirements.in with ``packages``, re-resolves pinned,
    hashed requirements.txt via pip-compile, downloads source
    distributions for everything, and extracts them into the vendor
    directory before registering the file changes with the VCS.

    :param packages: list of pip requirement specifiers to add/update
        (defaults to an empty list).
    :param with_windows_wheel: additionally download a CPython 2.7
        win_amd64 wheel for the (single) requested package.
    :raises Exception: if ``with_windows_wheel`` is set and ``packages``
        does not contain exactly one entry.
    """
    self.populate_logger()
    self.log_manager.enable_unstructured()
    vendor_dir = mozpath.join(self.topsrcdir,
                              os.path.join('third_party', 'python'))
    packages = packages or []
    if with_windows_wheel and len(packages) != 1:
        raise Exception(
            '--with-windows-wheel is only supported for a single package!')

    self.activate_virtualenv()
    pip_compile = os.path.join(self.virtualenv_manager.bin_path, "pip-compile")
    if not os.path.exists(pip_compile):
        # Bootstrap pip-tools from the already-vendored copy.
        path = os.path.normpath(
            os.path.join(self.topsrcdir, 'third_party', 'python', 'pip-tools'))
        self.virtualenv_manager.install_pip_package(path, vendored=True)

    spec = os.path.join(vendor_dir, 'requirements.in')
    requirements = os.path.join(vendor_dir, 'requirements.txt')

    with TemporaryDirectory() as spec_dir:
        # Work on a copy of requirements.in; the original is only
        # overwritten after pip-compile succeeds.
        tmpspec = 'requirements-mach-vendor-python.in'
        tmpspec_absolute = os.path.join(spec_dir, tmpspec)
        shutil.copyfile(spec, tmpspec_absolute)
        self._update_packages(tmpspec_absolute, packages)

        # resolve the dependencies and update requirements.txt
        subprocess.check_output(
            [
                pip_compile, tmpspec, '--no-header', '--no-index',
                '--output-file', requirements, '--generate-hashes'
            ],
            # Run pip-compile from within the temporary directory so that the "via"
            # annotations don't have the non-deterministic temporary path in them.
            cwd=spec_dir)

        with TemporaryDirectory() as tmp:
            # use requirements.txt to download archived source distributions of all packages
            self.virtualenv_manager._run_pip([
                'download', '-r', requirements, '--no-deps', '--dest', tmp,
                '--no-binary', ':all:', '--disable-pip-version-check'
            ])
            if with_windows_wheel:
                # This is hardcoded to CPython 2.7 for win64, which is good
                # enough for what we need currently. If we need psutil for Python 3
                # in the future that could be added here as well.
                self.virtualenv_manager._run_pip([
                    'download', '--dest', tmp, '--no-deps', '--only-binary',
                    ':all:', '--platform', 'win_amd64', '--implementation',
                    'cp', '--python-version', '27', '--abi', 'none',
                    '--disable-pip-version-check', packages[0]
                ])
            self._extract(tmp, vendor_dir)
            # Only now commit the updated spec back to requirements.in.
            shutil.copyfile(tmpspec_absolute, spec)
            self.repository.add_remove_files(vendor_dir)
def _process_jacoco_reports(self):
    """Convert gradle JaCoCo XML reports into a grcov lcov archive.

    Downloads the grcov binary via the in-tree fetch-content script
    (driven by the MOZ_FETCHES task environment variable), feeds the
    app and geckoview jacocoTestReport.xml files to it, and writes the
    resulting lcov data into code-coverage-grcov.zip in the objdir.
    """
    def download_grcov(parent_dir):
        # Fetch the grcov task artifact into parent_dir and return the
        # path of the extracted binary.
        fetch_script_path = os.path.join(self.topsrcdir, 'taskcluster',
                                         'scripts', 'misc', 'fetch-content')
        args = [fetch_script_path, 'task-artifacts',
                os.environ['MOZ_FETCHES'], '-d', parent_dir]
        self.run_process(args, ensure_exit_code=True)
        return os.path.join(parent_dir, 'grcov')

    def run_grcov(grcov_path, input_path):
        # grcov reads every XML report in input_path and emits lcov text.
        args = [grcov_path, input_path, '-t', 'lcov']
        return subprocess.check_output(args)

    with TemporaryDirectory() as xml_dir, TemporaryDirectory() as grcov_dir:
        grcov = download_grcov(grcov_dir)
        report_xml_template = self.topobjdir + '/gradle/build/mobile/android/%s/reports/jacoco/jacocoTestReport/jacocoTestReport.xml'  # NOQA: E501
        # Collect both gradle modules' reports into one directory so a
        # single grcov invocation processes them together.
        shutil.copy(report_xml_template % 'app',
                    os.path.join(xml_dir, 'app.xml'))
        shutil.copy(report_xml_template % 'geckoview',
                    os.path.join(xml_dir, 'geckoview.xml'))

        # Parse output files with grcov.
        grcov_output = run_grcov(grcov, xml_dir)
        grcov_zip_path = os.path.join(self.topobjdir, 'code-coverage-grcov.zip')
        with zipfile.ZipFile(grcov_zip_path, 'w', zipfile.ZIP_DEFLATED) as z:
            z.writestr('grcov_lcov_output.info', grcov_output)
def vendor(self, packages=None, keep_extra_files=False):
    """Vendor Python packages into third_party/python.

    Updates requirements.in with ``packages``, regenerates the pinned,
    hashed requirements.txt via pip-compile, downloads source
    distributions of every requirement, and extracts them into the
    vendor directory before registering the changes with the VCS.

    :param packages: list of pip requirement specifiers to add/update
        (defaults to an empty list).
    :param keep_extra_files: forwarded to ``self._extract``; when true,
        extra files in the vendor directory are preserved.
    """
    self.populate_logger()
    self.log_manager.enable_unstructured()
    vendor_dir = mozpath.join(self.topsrcdir,
                              os.path.join("third_party", "python"))
    packages = packages or []
    self.activate_virtualenv()
    pip_compile = os.path.join(self.virtualenv_manager.bin_path, "pip-compile")
    if not os.path.exists(pip_compile):
        # Bootstrap pip-tools from the already-vendored copy.
        path = os.path.normpath(
            os.path.join(self.topsrcdir, "third_party", "python", "pip-tools"))
        self.virtualenv_manager.install_pip_package(path, vendored=True)

    spec = os.path.join(vendor_dir, "requirements.in")
    requirements = os.path.join(vendor_dir, "requirements.txt")

    with TemporaryDirectory() as spec_dir:
        # Work on a copy of requirements.in; the original is only
        # overwritten after pip-compile succeeds.
        tmpspec = "requirements-mach-vendor-python.in"
        tmpspec_absolute = os.path.join(spec_dir, tmpspec)
        shutil.copyfile(spec, tmpspec_absolute)
        self._update_packages(tmpspec_absolute, packages)

        # resolve the dependencies and update requirements.txt
        subprocess.check_output(
            [
                pip_compile,
                tmpspec,
                "--no-header",
                "--no-index",
                "--output-file",
                requirements,
                "--generate-hashes",
            ],
            # Run pip-compile from within the temporary directory so that the "via"
            # annotations don't have the non-deterministic temporary path in them.
            cwd=spec_dir,
        )

        with TemporaryDirectory() as tmp:
            # use requirements.txt to download archived source distributions of all packages
            self.virtualenv_manager._run_pip([
                "download",
                "-r",
                requirements,
                "--no-deps",
                "--dest",
                tmp,
                "--no-binary",
                ":all:",
                "--disable-pip-version-check",
            ])
            self._extract(tmp, vendor_dir, keep_extra_files)
            # Only now commit the updated spec back to requirements.in.
            shutil.copyfile(tmpspec_absolute, spec)
            self.repository.add_remove_files(vendor_dir)
def test_substring_mappings(self):
    """Test that a path mapping that's a substring of another works."""
    with TemporaryDirectory() as d1, TemporaryDirectory() as d2:
        # Use context managers so the files are closed (and flushed)
        # before the server tries to serve them; the original
        # open(...).write(...) leaked the file handles.
        with open(os.path.join(d1, "test1.txt"), "w") as f:
            f.write("test 1 contents")
        with open(os.path.join(d2, "test2.txt"), "w") as f:
            f.write("test 2 contents")
        httpd = mozhttpd.MozHttpd(port=0,
                                  path_mappings={'/abcxyz': d1,
                                                 '/abc': d2,
                                                 })
        httpd.start(block=False)
        try:
            # The longer mapping must win for URLs under /abcxyz even
            # though /abc is a prefix of it.
            self.try_get(httpd.get_url("/abcxyz/test1.txt"), "test 1 contents")
            self.try_get(httpd.get_url("/abc/test2.txt"), "test 2 contents")
        finally:
            # Stop the server even if an assertion above fails.
            httpd.stop()
def test_basic(self):
    """Test that requests to docroot and a path mapping work as expected."""
    with TemporaryDirectory() as d1, TemporaryDirectory() as d2:
        # Use context managers so the files are closed (and flushed)
        # before the server tries to serve them; the original
        # open(...).write(...) leaked the file handles.
        with open(os.path.join(d1, "test1.txt"), "w") as f:
            f.write("test 1 contents")
        with open(os.path.join(d2, "test2.txt"), "w") as f:
            f.write("test 2 contents")
        httpd = mozhttpd.MozHttpd(port=0, docroot=d1,
                                  path_mappings={'/files': d2})
        httpd.start(block=False)
        try:
            self.try_get(httpd.get_url("/test1.txt"), "test 1 contents")
            self.try_get(httpd.get_url("/files/test2.txt"), "test 2 contents")
            self.try_get_expect_404(httpd.get_url("/files/test2_nope.txt"))
        finally:
            # Stop the server even if an assertion above fails.
            httpd.stop()
def test_no_docroot(self):
    """Test that path mappings with no docroot work."""
    with TemporaryDirectory() as d1:
        httpd = mozhttpd.MozHttpd(port=0, path_mappings={'/foo': d1})
        httpd.start(block=False)
        try:
            # With no docroot configured, the root URL must 404.
            self.try_get_expect_404(httpd.get_url())
        finally:
            # Stop the server even if the assertion above fails; the
            # original leaked the server thread on test failure.
            httpd.stop()
def run_suite(self, suite, groups, output_zip):
    """Run ``suite`` and archive its result files into ``output_zip``.

    Results are written into a scratch directory by ``self.run_test``
    and then stored in the zip under ``<suite>/<filename>``.

    :param suite: suite identifier, also used as the zip subdirectory.
    :param groups: test groups forwarded to ``self.run_test``.
    :param output_zip: an open, writable zipfile.ZipFile-like object.
    """
    with TemporaryDirectory() as temp_dir:
        result_files = self.run_test(suite, groups, temp_dir)
        for path in result_files:
            # os.path.basename is the idiomatic form of
            # os.path.split(path)[1].
            file_name = os.path.basename(path)
            output_zip.write(path, "%s/%s" % (suite, file_name))
def install_pip_package(self, package, vendored=False):
    """Install a package via pip.

    The supplied package is specified using a pip requirement specifier.
    e.g. 'foo' or 'foo==1.0'.

    If the package is already installed, this is a no-op.

    If vendored is True, no package index will be used and no dependencies
    will be installed.

    :param package: pip requirement specifier, or (when ``vendored`` is
        True) a filesystem path to a vendored package directory.
    :param vendored: install from the local tree without hitting an index.
    :returns: the result of ``self._run_pip`` (or None on the
        already-satisfied fast path).
    """
    import mozfile
    from mozfile import TemporaryDirectory

    if sys.executable.startswith(self.bin_path):
        # If we're already running in this interpreter, we can optimize in
        # the case that the package requirement is already satisfied.
        from pip._internal.req.constructors import install_req_from_line
        req = install_req_from_line(package)
        req.check_if_exists(use_user_site=False)
        if req.satisfied_by is not None:
            return

    args = ["install"]
    vendored_dist_info_dir = None

    if vendored:
        args.extend([
            "--no-deps",
            "--no-index",
            # The setup will by default be performed in an isolated build
            # environment, and since we're running with --no-index, this
            # means that pip will be unable to install in the isolated build
            # environment any dependencies that might be specified in a
            # setup_requires directive for the package. Since we're manually
            # controlling our build environment, build isolation isn't a
            # concern and we can disable that feature. Note that this is
            # safe and doesn't risk trampling any other packages that may be
            # installed due to passing `--no-deps --no-index` as well.
            "--no-build-isolation",
        ])
        # A .dist-info directory marks the vendored package as an
        # unpacked wheel rather than a source tree.
        vendored_dist_info_dir = next(
            (d for d in os.listdir(package) if d.endswith(".dist-info")),
            None)

    with TemporaryDirectory() as tmp:
        if vendored_dist_info_dir:
            # This is a vendored wheel. We have to re-pack it in order for pip
            # to install it.
            wheel_file = os.path.join(
                tmp, "{}-1.0-py3-none-any.whl".format(
                    os.path.basename(package)))
            shutil.make_archive(wheel_file, "zip", package)
            # make_archive appends ".zip"; rename to the ".whl" name pip
            # expects.
            mozfile.move("{}.zip".format(wheel_file), wheel_file)
            package = wheel_file
        args.append(package)
        return self._run_pip(args, stderr=subprocess.STDOUT)
def run_suite(self, suite, groups, log_manager, report_manager):
    """Run one suite in a scratch directory and record its regressions.

    The structured log and result files produced by ``self.run_test``
    are handed to ``report_manager`` and the resulting subsuite report
    is appended to ``self.regressions``.
    """
    with TemporaryDirectory() as scratch_dir:
        results, structured_log = self.run_test(suite, groups, scratch_dir)
        report = report_manager.add_subsuite_report(structured_log, results)
        self.regressions.append(report)
def test_exception(self):
    """ensure that TemporaryDirectory handles exceptions"""
    captured_path = None
    with self.assertRaises(Exception):
        with TemporaryDirectory() as tmp:
            # Remember the path so we can check cleanup after the
            # exception propagates out of the context manager.
            captured_path = tmp
            self.assertTrue(os.path.isdir(tmp))
            raise Exception("oops")
    # The directory must be gone despite the exception.
    self.assertFalse(os.path.isdir(captured_path))
    self.assertFalse(os.path.exists(captured_path))
def test_removed(self):
    """ensure that a TemporaryDirectory gets removed"""
    path = None
    with TemporaryDirectory() as tmp:
        path = tmp
        self.assertTrue(os.path.isdir(tmp))
        tmpfile = os.path.join(tmp, "a_temp_file")
        # Close the file handle explicitly; the original
        # open(...).write(...) leaked it, and an open handle can block
        # directory removal on Windows.
        with open(tmpfile, "w") as f:
            f.write("data")
        self.assertTrue(os.path.isfile(tmpfile))
    # After the context manager exits, the directory and its contents
    # must be gone.
    self.assertFalse(os.path.isdir(path))
    self.assertFalse(os.path.exists(path))
def test_multipart_path_mapping(self):
    """Test that a path mapping with multiple directories works."""
    with TemporaryDirectory() as d1:
        # Close (and flush) the file before the server serves it; the
        # original open(...).write(...) leaked the file handle.
        with open(os.path.join(d1, "test1.txt"), "w") as f:
            f.write("test 1 contents")
        httpd = mozhttpd.MozHttpd(port=0,
                                  path_mappings={'/abc/def/ghi': d1})
        httpd.start(block=False)
        try:
            self.try_get(httpd.get_url("/abc/def/ghi/test1.txt"),
                         "test 1 contents")
            # Prefixes of the mapping must not resolve.
            self.try_get_expect_404(httpd.get_url("/abc/test1.txt"))
            self.try_get_expect_404(httpd.get_url("/abc/def/test1.txt"))
        finally:
            # Stop the server even if an assertion above fails.
            httpd.stop()
def _process_jacoco_reports(self):
    """Convert gradle JaCoCo XML reports into a grcov lcov archive.

    Uses the grcov binary already fetched into MOZ_FETCHES_DIR, feeds
    it the app and geckoview jacocoTestReport.xml files, and writes the
    resulting lcov data into code-coverage-grcov.zip in the objdir.
    """
    def run_grcov(grcov_path, input_path):
        # grcov reads every XML report in input_path and emits lcov text.
        args = [grcov_path, input_path, '-t', 'lcov']
        return subprocess.check_output(args)

    with TemporaryDirectory() as xml_dir:
        # grcov is provided as a task fetch artifact.
        grcov = os.path.join(os.environ['MOZ_FETCHES_DIR'], 'grcov')
        report_xml_template = self.topobjdir + '/gradle/build/mobile/android/%s/reports/jacoco/jacocoTestReport/jacocoTestReport.xml'  # NOQA: E501
        # Collect both gradle modules' reports into one directory so a
        # single grcov invocation processes them together.
        shutil.copy(report_xml_template % 'app',
                    os.path.join(xml_dir, 'app.xml'))
        shutil.copy(report_xml_template % 'geckoview',
                    os.path.join(xml_dir, 'geckoview.xml'))

        # Parse output files with grcov.
        grcov_output = run_grcov(grcov, xml_dir)
        grcov_zip_path = os.path.join(self.topobjdir, 'code-coverage-grcov.zip')
        with zipfile.ZipFile(grcov_zip_path, 'w', zipfile.ZIP_DEFLATED) as z:
            z.writestr('grcov_lcov_output.info', grcov_output)
def constructCertDatabase(build, srcDir):
    """Build an NSS certificate database in ``srcDir`` from spec files.

    Walks ``srcDir`` for ``.certspec``/``.keyspec`` files, generates PEM
    certificates/keys from them with the in-tree pycert.py/pykey.py
    scripts, converts private keys to PKCS12 with openssl, and imports
    everything into a fresh NSS database via certutil/pk12util.

    :param build: build object providing ``get_binary_path`` and
        ``topsrcdir``.
    :param srcDir: directory holding the spec files and receiving the
        resulting database.
    :returns: 0 on success, or the first non-zero utility exit status.
    :raises Exception: on malformed keyspec names, a pre-existing
        pgoserver.certspec, or a keyspec without a matching certspec.
    """
    certutil = build.get_binary_path(what="certutil")
    pk12util = build.get_binary_path(what="pk12util")
    openssl = distutils.spawn.find_executable("openssl")
    pycert = os.path.join(build.topsrcdir, "security", "manager", "ssl",
                          "tests", "unit", "pycert.py")
    pykey = os.path.join(build.topsrcdir, "security", "manager", "ssl",
                         "tests", "unit", "pykey.py")

    with NamedTemporaryFile() as pwfile, TemporaryDirectory() as pemfolder:
        # The database password is a single newline (i.e. empty password).
        pwfile.write("\n")
        pwfile.flush()

        if dbFilesExist(srcDir):
            # Make sure all DB files from src are really deleted
            unlinkDbFiles(srcDir)

        # Copy all .certspec and .keyspec files to a temporary directory
        for root, dirs, files in os.walk(srcDir):
            for spec in [
                    i for i in files
                    if i.endswith(".certspec") or i.endswith(".keyspec")
            ]:
                shutil.copyfile(os.path.join(root, spec),
                                os.path.join(pemfolder, spec))

        # Write a certspec for the "server-locations.txt" file to that temporary directory
        pgoserver_certspec = os.path.join(pemfolder, "pgoserver.certspec")
        if os.path.exists(pgoserver_certspec):
            raise Exception("{} already exists, which isn't allowed".format(
                pgoserver_certspec))
        with open(pgoserver_certspec, "w") as fd:
            writeCertspecForServerLocations(fd)

        # Generate certs for all certspecs
        for root, dirs, files in os.walk(pemfolder):
            for certspec in [i for i in files if i.endswith(".certspec")]:
                name = certspec.split(".certspec")[0]
                pem = os.path.join(pemfolder, "{}.cert.pem".format(name))

                print("Generating public certificate {} (pem={})".format(
                    name, pem))

                with open(os.path.join(root, certspec), "r") as certspec_file:
                    certspec_data = certspec_file.read()
                    with open(pem, "w") as pem_file:
                        # pycert turns the spec text into a PEM certificate.
                        status = runUtil(pycert, [],
                                         inputdata=certspec_data,
                                         outputstream=pem_file)
                        if status:
                            return status

                # Import the certificate into the NSS DB as a trusted peer.
                status = runUtil(certutil, [
                    "-A", "-n", name, "-t", "P,,", "-i", pem, "-d", srcDir,
                    "-f", pwfile.name
                ])
                if status:
                    return status

            for keyspec in [i for i in files if i.endswith(".keyspec")]:
                parts = keyspec.split(".")
                name = parts[0]
                key_type = parts[1]
                if key_type not in ["ca", "client", "server"]:
                    raise Exception(
                        "{}: keyspec filenames must be of the form XXX.client.keyspec "
                        "or XXX.ca.keyspec (key_type={})".format(
                            keyspec, key_type))
                key_pem = os.path.join(pemfolder, "{}.key.pem".format(name))

                print("Generating private key {} (pem={})".format(
                    name, key_pem))

                with open(os.path.join(root, keyspec), "r") as keyspec_file:
                    keyspec_data = keyspec_file.read()
                    with open(key_pem, "w") as pem_file:
                        # pykey turns the spec text into a PEM private key.
                        status = runUtil(pykey, [],
                                         inputdata=keyspec_data,
                                         outputstream=pem_file)
                        if status:
                            return status

                cert_pem = os.path.join(pemfolder, "{}.cert.pem".format(name))
                if not os.path.exists(cert_pem):
                    raise Exception(
                        "There has to be a corresponding certificate named {} for "
                        "the keyspec {}".format(cert_pem, keyspec))

                p12 = os.path.join(pemfolder, "{}.key.p12".format(name))
                print("Converting private key {} to PKCS12 (p12={})".format(
                    key_pem, p12))
                status = runUtil(openssl, [
                    "pkcs12", "-export", "-inkey", key_pem, "-in", cert_pem,
                    "-name", name, "-out", p12, "-passout",
                    "file:" + pwfile.name
                ])
                if status:
                    return status

                print("Importing private key {} to database".format(key_pem))
                status = runUtil(pk12util, [
                    "-i", p12, "-d", srcDir, "-w", pwfile.name, "-k",
                    pwfile.name
                ])
                if status:
                    return status

                # CA certs and client keys are additionally copied next to
                # the database for consumers to pick up by name.
                if key_type == "ca":
                    shutil.copyfile(cert_pem,
                                    os.path.join(srcDir, "{}.ca".format(name)))
                elif key_type == "client":
                    shutil.copyfile(
                        p12, os.path.join(srcDir, "{}.client".format(name)))
                elif key_type == "server":
                    pass  # Nothing to do for server keys
                else:
                    raise Exception(
                        "State error: Unknown keyspec key_type: {}".format(
                            key_type))

    return 0
def valgrind_test(self, suppressions):
    """Run Firefox under Valgrind against the in-tree PGO server pages.

    Builds a valgrind-profile Firefox profile, launches the browser
    through Valgrind with the standard in-tree suppression files plus
    any extra ``suppressions``, and classifies the outcome from the
    OutputHandler error/suppression counts and the process exit code.

    :param suppressions: iterable of extra Valgrind suppression file paths.
    :returns: 0 on pass, 1 on Valgrind errors or parse mismatch
        (orange), 2 on timeout or non-zero Valgrind exit (red).
    """
    from mozfile import TemporaryDirectory
    from mozhttpd import MozHttpd
    from mozprofile import FirefoxProfile, Preferences
    from mozprofile.permissions import ServerLocations
    from mozrunner import FirefoxRunner
    from mozrunner.utils import findInPath
    from six import string_types
    from valgrind.output_handler import OutputHandler

    build_dir = os.path.join(self.topsrcdir, 'build')

    # XXX: currently we just use the PGO inputs for Valgrind runs. This may
    # change in the future.
    httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
    httpd.start(block=False)

    with TemporaryDirectory() as profilePath:
        # TODO: refactor this into mozprofile
        profile_data_dir = os.path.join(self.topsrcdir, 'testing', 'profiles')
        with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh:
            base_profiles = json.load(fh)['valgrind']

        prefpaths = [
            os.path.join(profile_data_dir, profile, 'user.js')
            for profile in base_profiles
        ]
        prefs = {}
        for path in prefpaths:
            prefs.update(Preferences.read_prefs(path))

        # Substitute the live server address into string-valued prefs.
        interpolation = {
            'server': '%s:%d' % httpd.httpd.server_address,
        }
        for k, v in prefs.items():
            if isinstance(v, string_types):
                v = v.format(**interpolation)
            prefs[k] = Preferences.cast(v)

        quitter = os.path.join(self.topsrcdir, 'tools', 'quitter',
                               '*****@*****.**')

        locations = ServerLocations()
        locations.add_host(host='127.0.0.1',
                           port=httpd.httpd.server_port,
                           options='primary')

        profile = FirefoxProfile(profile=profilePath,
                                 preferences=prefs,
                                 addons=[quitter],
                                 locations=locations)

        firefox_args = [httpd.get_url()]

        env = os.environ.copy()
        env['G_SLICE'] = 'always-malloc'
        env['MOZ_CC_RUN_DURING_SHUTDOWN'] = '1'
        env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
        env['MOZ_DISABLE_NONLOCAL_CONNECTIONS'] = '1'
        env['XPCOM_DEBUG_BREAK'] = 'warn'

        env.update(self.extra_environment_variables)

        outputHandler = OutputHandler(self.log)
        kp_kwargs = {'processOutputLine': [outputHandler]}

        valgrind = 'valgrind'
        if not os.path.exists(valgrind):
            valgrind = findInPath(valgrind)

        valgrind_args = [
            valgrind,
            '--sym-offsets=yes',
            '--smc-check=all-non-file',
            '--vex-iropt-register-updates=allregs-at-mem-access',
            '--gen-suppressions=all',
            '--num-callers=36',
            '--leak-check=full',
            '--show-possibly-lost=no',
            '--track-origins=yes',
            '--trace-children=yes',
            '-v',  # Enable verbosity to get the list of used suppressions
            # Avoid excessive delays in the presence of spinlocks.
            # See bug 1309851.
            '--fair-sched=yes',
            # Keep debuginfo after library unmap. See bug 1382280.
            '--keep-debuginfo=yes',
            # Reduce noise level on rustc and/or LLVM compiled code.
            # See bug 1365915
            '--expensive-definedness-checks=yes',
        ]

        for s in suppressions:
            valgrind_args.append('--suppressions=' + s)

        supps_dir = os.path.join(build_dir, 'valgrind')
        supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
        valgrind_args.append('--suppressions=' + supps_file1)

        # Add the architecture-specific suppression file when one exists.
        if mozinfo.os == 'linux':
            machtype = {
                'x86_64': 'x86_64-pc-linux-gnu',
                'x86': 'i386-pc-linux-gnu',
            }.get(mozinfo.processor)
            if machtype:
                supps_file2 = os.path.join(supps_dir, machtype + '.sup')
                if os.path.isfile(supps_file2):
                    valgrind_args.append('--suppressions=' + supps_file2)

        exitcode = None
        timeout = 1800
        try:
            runner = FirefoxRunner(profile=profile,
                                   binary=self.get_binary_path(),
                                   cmdargs=firefox_args,
                                   env=env,
                                   process_args=kp_kwargs)
            runner.start(debug_args=valgrind_args)
            exitcode = runner.wait(timeout=timeout)
        finally:
            # Classify the run even if wait() raised: a mismatch between
            # the error count and the generated-suppression count means
            # the output parsing itself is broken.
            errs = outputHandler.error_count
            supps = outputHandler.suppression_count
            if errs != supps:
                status = 1  # turns the TBPL job orange
                self.log(
                    logging.ERROR, 'valgrind-fail-parsing', {
                        'errs': errs,
                        'supps': supps
                    },
                    'TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors '
                    'seen, but {supps} generated suppressions seen')
            elif errs == 0:
                status = 0
                self.log(
                    logging.INFO, 'valgrind-pass', {},
                    'TEST-PASS | valgrind-test | valgrind found no errors')
            else:
                status = 1  # turns the TBPL job orange
                # We've already printed details of the errors.

            if exitcode is None:
                status = 2  # turns the TBPL job red
                self.log(
                    logging.ERROR, 'valgrind-fail-timeout',
                    {'timeout': timeout},
                    'TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out '
                    '(reached {timeout} second limit)')
            elif exitcode != 0:
                status = 2  # turns the TBPL job red
                self.log(
                    logging.ERROR, 'valgrind-fail-errors',
                    {'exitcode': exitcode},
                    'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code '
                    'from Valgrind: {exitcode}')

        httpd.stop()

    return status
def valgrind_test(self, suppressions):
    """Run Firefox under Valgrind against the in-tree PGO server pages.

    Older variant: scans Valgrind's ERROR SUMMARY lines with a regex
    instead of a structured output handler, and resolves the
    architecture suppression file via bash's MACHTYPE.

    :param suppressions: iterable of extra Valgrind suppression file paths.
    :returns: 0 on pass, 1 when Valgrind reported errors (orange),
        2 on non-zero Valgrind exit (red).
    """
    import json
    import re
    import sys
    import tempfile

    from mozbuild.base import MozbuildObject
    from mozfile import TemporaryDirectory
    from mozhttpd import MozHttpd
    from mozprofile import FirefoxProfile, Preferences
    from mozprofile.permissions import ServerLocations
    from mozrunner import FirefoxRunner
    from mozrunner.utils import findInPath

    build_dir = os.path.join(self.topsrcdir, 'build')

    # XXX: currently we just use the PGO inputs for Valgrind runs. This may
    # change in the future.
    httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
    httpd.start(block=False)

    with TemporaryDirectory() as profilePath:
        # TODO: refactor this into mozprofile
        prefpath = os.path.join(self.topsrcdir, 'testing', 'profiles',
                                'prefs_general.js')
        prefs = {}
        prefs.update(Preferences.read_prefs(prefpath))
        # Substitute the live server address via %-interpolation over the
        # JSON-serialized pref dict.
        interpolation = {
            'server': '%s:%d' % httpd.httpd.server_address,
            'OOP': 'false'
        }
        prefs = json.loads(json.dumps(prefs) % interpolation)
        for pref in prefs:
            prefs[pref] = Preferences.cast(prefs[pref])

        quitter = os.path.join(self.distdir, 'xpi-stage', 'quitter')

        locations = ServerLocations()
        locations.add_host(host='127.0.0.1',
                           port=httpd.httpd.server_port,
                           options='primary')

        profile = FirefoxProfile(profile=profilePath,
                                 preferences=prefs,
                                 addons=[quitter],
                                 locations=locations)

        firefox_args = [httpd.get_url()]

        env = os.environ.copy()
        env['G_SLICE'] = 'always-malloc'
        env['XPCOM_CC_RUN_DURING_SHUTDOWN'] = '1'
        env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
        env['XPCOM_DEBUG_BREAK'] = 'warn'

        class OutputHandler(object):
            """Flags any non-empty Valgrind ERROR SUMMARY line."""

            def __init__(self):
                self.found_errors = False

            def __call__(self, line):
                print(line)
                m = re.match(
                    r'.*ERROR SUMMARY: [1-9]\d* errors from \d+ contexts',
                    line)
                if m:
                    self.found_errors = True

        outputHandler = OutputHandler()
        kp_kwargs = {'processOutputLine': [outputHandler]}

        valgrind = 'valgrind'
        if not os.path.exists(valgrind):
            valgrind = findInPath(valgrind)

        valgrind_args = [
            valgrind,
            '--smc-check=all-non-file',
            '--vex-iropt-register-updates=allregs-at-mem-access',
            '--gen-suppressions=all',
            '--num-callers=20',
            '--leak-check=full',
            '--show-possibly-lost=no',
            '--track-origins=yes'
        ]

        for s in suppressions:
            valgrind_args.append('--suppressions=' + s)

        supps_dir = os.path.join(build_dir, 'valgrind')
        supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
        valgrind_args.append('--suppressions=' + supps_file1)

        # MACHTYPE is an odd bash-only environment variable that doesn't
        # show up in os.environ, so we have to get it another way.
        machtype = subprocess.check_output(
            ['bash', '-c', 'echo $MACHTYPE']).rstrip()
        supps_file2 = os.path.join(supps_dir, machtype + '.sup')
        if os.path.isfile(supps_file2):
            valgrind_args.append('--suppressions=' + supps_file2)

        exitcode = None
        try:
            runner = FirefoxRunner(profile=profile,
                                   binary=self.get_binary_path(),
                                   cmdargs=firefox_args,
                                   env=env,
                                   kp_kwargs=kp_kwargs)
            runner.start(debug_args=valgrind_args)
            exitcode = runner.wait()
        finally:
            # Classify the run even if wait() raised.
            if not outputHandler.found_errors:
                status = 0
                print(
                    'TEST-PASS | valgrind-test | valgrind found no errors')
            else:
                status = 1  # turns the TBPL job orange
                print(
                    'TEST-UNEXPECTED-FAIL | valgrind-test | valgrind found errors'
                )

            # NOTE: a still-None exitcode also satisfies != 0 here.
            if exitcode != 0:
                status = 2  # turns the TBPL job red
                print(
                    'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code from Valgrind'
                )

        httpd.stop()

    return status
def vendor(self, keep_extra_files=False):
    """Vendor Python packages into third_party/python.

    Newest variant: invokes pip-compile as ``python -m piptools compile``,
    seeds it with the existing pinned requirements.txt so transitive
    versions don't drift, purges the vendor directory, downloads
    universal (pure-Python) distributions, and extracts them before
    registering the changes with the VCS.

    :param keep_extra_files: forwarded to ``self._extract``; when true,
        extra files in the vendor directory are preserved.
    """
    self.populate_logger()
    self.log_manager.enable_unstructured()
    vendor_dir = mozpath.join(self.topsrcdir,
                              os.path.join("third_party", "python"))
    self.activate_virtualenv()

    spec = os.path.join(vendor_dir, "requirements.in")
    requirements = os.path.join(vendor_dir, "requirements.txt")

    with TemporaryDirectory() as spec_dir:
        # Work on copies; the originals are only overwritten after
        # pip-compile succeeds.
        tmpspec = "requirements-mach-vendor-python.in"
        tmpspec_absolute = os.path.join(spec_dir, tmpspec)
        shutil.copyfile(spec, tmpspec_absolute)
        self._update_packages(tmpspec_absolute)

        tmp_requirements_absolute = os.path.join(spec_dir, "requirements.txt")
        # Copy the existing "requirements.txt" file so that the versions
        # of transitive dependencies aren't implicitly changed.
        shutil.copy(requirements, tmp_requirements_absolute)

        # resolve the dependencies and update requirements.txt
        subprocess.check_output(
            [
                self.virtualenv_manager.python_path,
                "-m",
                "piptools",
                "compile",
                tmpspec,
                "--no-header",
                "--no-emit-index-url",
                "--output-file",
                tmp_requirements_absolute,
                "--generate-hashes",
            ],
            # Run pip-compile from within the temporary directory so that the "via"
            # annotations don't have the non-deterministic temporary path in them.
            cwd=spec_dir,
        )

        with TemporaryDirectory() as tmp:
            # use requirements.txt to download archived source distributions of all packages
            self.virtualenv_manager._run_pip(
                [
                    "download",
                    "-r",
                    tmp_requirements_absolute,
                    "--no-deps",
                    "--dest",
                    tmp,
                    "--abi",
                    "none",
                    "--platform",
                    "any",
                ]
            )
            # Remove the old vendored contents before re-extracting.
            _purge_vendor_dir(vendor_dir)
            self._extract(tmp, vendor_dir, keep_extra_files)
            # Commit the updated spec and pins back into the tree.
            shutil.copyfile(tmpspec_absolute, spec)
            shutil.copy(tmp_requirements_absolute, requirements)
            self.repository.add_remove_files(vendor_dir)
def valgrind_test(self, suppressions):
    """Run Firefox under Valgrind against the in-tree PGO server pages.

    Newest variant: adds --show-mismatched-frees=no and explicit
    handling of BinaryNotFoundException when the Firefox binary cannot
    be located.

    :param suppressions: iterable of extra Valgrind suppression file paths.
    :returns: 0 on pass, 1 on Valgrind errors or parse mismatch
        (orange), 2 on missing binary, timeout, or non-zero Valgrind
        exit (red).
    """
    from mozfile import TemporaryDirectory
    from mozhttpd import MozHttpd
    from mozprofile import FirefoxProfile, Preferences
    from mozprofile.permissions import ServerLocations
    from mozrunner import FirefoxRunner
    from mozrunner.utils import findInPath
    from six import string_types
    from valgrind.output_handler import OutputHandler

    build_dir = os.path.join(self.topsrcdir, "build")

    # XXX: currently we just use the PGO inputs for Valgrind runs. This may
    # change in the future.
    httpd = MozHttpd(docroot=os.path.join(build_dir, "pgo"))
    httpd.start(block=False)

    with TemporaryDirectory() as profilePath:
        # TODO: refactor this into mozprofile
        profile_data_dir = os.path.join(self.topsrcdir, "testing", "profiles")
        with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh:
            base_profiles = json.load(fh)["valgrind"]

        prefpaths = [
            os.path.join(profile_data_dir, profile, "user.js")
            for profile in base_profiles
        ]
        prefs = {}
        for path in prefpaths:
            prefs.update(Preferences.read_prefs(path))

        # Substitute the live server address into string-valued prefs.
        interpolation = {
            "server": "%s:%d" % httpd.httpd.server_address,
        }
        for k, v in prefs.items():
            if isinstance(v, string_types):
                v = v.format(**interpolation)
            prefs[k] = Preferences.cast(v)

        quitter = os.path.join(self.topsrcdir, "tools", "quitter",
                               "*****@*****.**")

        locations = ServerLocations()
        locations.add_host(host="127.0.0.1",
                           port=httpd.httpd.server_port,
                           options="primary")

        profile = FirefoxProfile(
            profile=profilePath,
            preferences=prefs,
            addons=[quitter],
            locations=locations,
        )

        firefox_args = [httpd.get_url()]

        env = os.environ.copy()
        env["G_SLICE"] = "always-malloc"
        env["MOZ_CC_RUN_DURING_SHUTDOWN"] = "1"
        env["MOZ_CRASHREPORTER_NO_REPORT"] = "1"
        env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"
        env["XPCOM_DEBUG_BREAK"] = "warn"

        outputHandler = OutputHandler(self.log)
        kp_kwargs = {
            "processOutputLine": [outputHandler],
            "universal_newlines": True,
        }

        valgrind = "valgrind"
        if not os.path.exists(valgrind):
            valgrind = findInPath(valgrind)

        valgrind_args = [
            valgrind,
            "--sym-offsets=yes",
            "--smc-check=all-non-file",
            "--vex-iropt-register-updates=allregs-at-mem-access",
            "--gen-suppressions=all",
            "--num-callers=36",
            "--leak-check=full",
            "--show-possibly-lost=no",
            "--track-origins=yes",
            "--trace-children=yes",
            "-v",  # Enable verbosity to get the list of used suppressions
            # Avoid excessive delays in the presence of spinlocks.
            # See bug 1309851.
            "--fair-sched=yes",
            # Keep debuginfo after library unmap. See bug 1382280.
            "--keep-debuginfo=yes",
            # Reduce noise level on rustc and/or LLVM compiled code.
            # See bug 1365915
            "--expensive-definedness-checks=yes",
            # Compensate for the compiler inlining `new` but not `delete`
            # or vice versa.
            "--show-mismatched-frees=no",
        ]

        for s in suppressions:
            valgrind_args.append("--suppressions=" + s)

        supps_dir = os.path.join(build_dir, "valgrind")
        supps_file1 = os.path.join(supps_dir, "cross-architecture.sup")
        valgrind_args.append("--suppressions=" + supps_file1)

        # Add the architecture-specific suppression file when one exists.
        if mozinfo.os == "linux":
            machtype = {
                "x86_64": "x86_64-pc-linux-gnu",
                "x86": "i386-pc-linux-gnu",
            }.get(mozinfo.processor)
            if machtype:
                supps_file2 = os.path.join(supps_dir, machtype + ".sup")
                if os.path.isfile(supps_file2):
                    valgrind_args.append("--suppressions=" + supps_file2)

        exitcode = None
        timeout = 1800
        binary_not_found_exception = None
        try:
            runner = FirefoxRunner(
                profile=profile,
                binary=self.get_binary_path(),
                cmdargs=firefox_args,
                env=env,
                process_args=kp_kwargs,
            )
            runner.start(debug_args=valgrind_args)
            exitcode = runner.wait(timeout=timeout)
        except BinaryNotFoundException as e:
            # Deferred: reported below so the finally-block classification
            # still runs.
            binary_not_found_exception = e
        finally:
            # Classify the run even if wait() raised: a mismatch between
            # the error count and the generated-suppression count means
            # the output parsing itself is broken.
            errs = outputHandler.error_count
            supps = outputHandler.suppression_count
            if errs != supps:
                status = 1  # turns the TBPL job orange
                self.log(
                    logging.ERROR,
                    "valgrind-fail-parsing",
                    {
                        "errs": errs,
                        "supps": supps
                    },
                    "TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors "
                    "seen, but {supps} generated suppressions seen",
                )
            elif errs == 0:
                status = 0
                self.log(
                    logging.INFO,
                    "valgrind-pass",
                    {},
                    "TEST-PASS | valgrind-test | valgrind found no errors",
                )
            else:
                status = 1  # turns the TBPL job orange
                # We've already printed details of the errors.

            if binary_not_found_exception:
                status = 2  # turns the TBPL job red
                self.log(
                    logging.ERROR,
                    "valgrind-fail-errors",
                    {"error": str(binary_not_found_exception)},
                    "TEST-UNEXPECTED-FAIL | valgrind-test | {error}",
                )
                self.log(
                    logging.INFO,
                    "valgrind-fail-errors",
                    {"help": binary_not_found_exception.help()},
                    "{help}",
                )
            elif exitcode is None:
                status = 2  # turns the TBPL job red
                self.log(
                    logging.ERROR,
                    "valgrind-fail-timeout",
                    {"timeout": timeout},
                    "TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out "
                    "(reached {timeout} second limit)",
                )
            elif exitcode != 0:
                status = 2  # turns the TBPL job red
                self.log(
                    logging.ERROR,
                    "valgrind-fail-errors",
                    {"exitcode": exitcode},
                    "TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code "
                    "from Valgrind: {exitcode}",
                )

        httpd.stop()

    return status
# Top-level profileserver setup: start the PGO docroot server with the
# configured path mappings, grant the primary/privileged location, and
# assemble the "profileserver" base-profile preferences.
path_mappings = {
    k: os.path.join(build.topsrcdir, v)
    for k, v in PATH_MAPPINGS.items()
}
httpd = MozHttpd(port=PORT,
                 docroot=os.path.join(build.topsrcdir, "build", "pgo"),
                 path_mappings=path_mappings)
httpd.start(block=False)

locations = ServerLocations()
locations.add_host(host='127.0.0.1',
                   port=PORT,
                   options='primary,privileged')

with TemporaryDirectory() as profilePath:
    # TODO: refactor this into mozprofile
    profile_data_dir = os.path.join(build.topsrcdir, 'testing', 'profiles')
    with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh:
        base_profiles = json.load(fh)['profileserver']

    prefpaths = [os.path.join(profile_data_dir, profile, 'user.js')
                 for profile in base_profiles]

    prefs = {}
    for path in prefpaths:
        prefs.update(Preferences.read_prefs(path))

    interpolation = {"server": "%s:%d" % httpd.httpd.server_address,
                     "OOP": "false"}
    for k, v in prefs.items():
        # NOTE(review): the body of this loop is not visible in this
        # chunk — the source appears truncated here; presumably it
        # interpolates and casts each pref value. Confirm against the
        # full file before relying on this block.
def valgrind_test(self, suppressions):
    """Run Firefox under Valgrind against the in-tree PGO server pages.

    Builds a valgrind profile from prefs_general.js, launches the
    browser through Valgrind with the standard in-tree suppression
    files plus any extra ``suppressions``, and classifies the outcome
    from the OutputHandler error/suppression counts and the process
    exit code.

    :param suppressions: iterable of extra Valgrind suppression file paths.
    :returns: 0 on pass, 1 on Valgrind errors or parse mismatch
        (orange), 2 on timeout or non-zero Valgrind exit (red).
    """
    import json
    import sys
    import tempfile

    from mozbuild.base import MozbuildObject
    from mozfile import TemporaryDirectory
    from mozhttpd import MozHttpd
    from mozprofile import FirefoxProfile, Preferences
    from mozprofile.permissions import ServerLocations
    from mozrunner import FirefoxRunner
    from mozrunner.utils import findInPath
    from valgrind.output_handler import OutputHandler

    build_dir = os.path.join(self.topsrcdir, 'build')

    # XXX: currently we just use the PGO inputs for Valgrind runs. This may
    # change in the future.
    httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo'))
    httpd.start(block=False)

    with TemporaryDirectory() as profilePath:
        # TODO: refactor this into mozprofile
        prefpath = os.path.join(self.topsrcdir, 'testing', 'profiles',
                                'prefs_general.js')
        prefs = {}
        prefs.update(Preferences.read_prefs(prefpath))
        # Substitute the live server address via %-interpolation over the
        # JSON-serialized pref dict.
        interpolation = {
            'server': '%s:%d' % httpd.httpd.server_address,
            'OOP': 'false'}
        prefs = json.loads(json.dumps(prefs) % interpolation)
        for pref in prefs:
            prefs[pref] = Preferences.cast(prefs[pref])

        quitter = os.path.join(self.topsrcdir, 'tools', 'quitter',
                               '*****@*****.**')

        locations = ServerLocations()
        locations.add_host(host='127.0.0.1',
                           port=httpd.httpd.server_port,
                           options='primary')

        profile = FirefoxProfile(profile=profilePath,
                                 preferences=prefs,
                                 addons=[quitter],
                                 locations=locations)

        firefox_args = [httpd.get_url()]

        env = os.environ.copy()
        env['G_SLICE'] = 'always-malloc'
        env['MOZ_CC_RUN_DURING_SHUTDOWN'] = '1'
        env['MOZ_CRASHREPORTER_NO_REPORT'] = '1'
        env['XPCOM_DEBUG_BREAK'] = 'warn'

        env.update(self.extra_environment_variables)

        outputHandler = OutputHandler(self.log)
        kp_kwargs = {'processOutputLine': [outputHandler]}

        valgrind = 'valgrind'
        if not os.path.exists(valgrind):
            valgrind = findInPath(valgrind)

        valgrind_args = [
            valgrind,
            '--smc-check=all-non-file',
            '--vex-iropt-register-updates=allregs-at-mem-access',
            '--gen-suppressions=all',
            '--num-callers=36',
            '--leak-check=full',
            '--show-possibly-lost=no',
            '--track-origins=yes',
            '--trace-children=yes',
            '-v',  # Enable verbosity to get the list of used suppressions
        ]

        for s in suppressions:
            valgrind_args.append('--suppressions=' + s)

        supps_dir = os.path.join(build_dir, 'valgrind')
        supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup')
        valgrind_args.append('--suppressions=' + supps_file1)

        # MACHTYPE is an odd bash-only environment variable that doesn't
        # show up in os.environ, so we have to get it another way.
        machtype = subprocess.check_output(['bash', '-c',
                                            'echo $MACHTYPE']).rstrip()
        supps_file2 = os.path.join(supps_dir, machtype + '.sup')
        if os.path.isfile(supps_file2):
            valgrind_args.append('--suppressions=' + supps_file2)

        exitcode = None
        timeout = 1800
        try:
            runner = FirefoxRunner(profile=profile,
                                   binary=self.get_binary_path(),
                                   cmdargs=firefox_args,
                                   env=env,
                                   process_args=kp_kwargs)
            runner.start(debug_args=valgrind_args)
            exitcode = runner.wait(timeout=timeout)
        finally:
            # Classify the run even if wait() raised: a mismatch between
            # the error count and the generated-suppression count means
            # the output parsing itself is broken.
            errs = outputHandler.error_count
            supps = outputHandler.suppression_count
            if errs != supps:
                status = 1  # turns the TBPL job orange
                self.log(logging.ERROR, 'valgrind-fail-parsing',
                         {'errs': errs, 'supps': supps},
                         'TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors seen, but {supps} generated suppressions seen')
            elif errs == 0:
                status = 0
                self.log(logging.INFO, 'valgrind-pass', {},
                         'TEST-PASS | valgrind-test | valgrind found no errors')
            else:
                status = 1  # turns the TBPL job orange
                # We've already printed details of the errors.

            # Fixed: compare to None with `is`, not `==` (PEP 8 idiom;
            # `== None` invokes __eq__ and can misbehave).
            if exitcode is None:
                status = 2  # turns the TBPL job red
                self.log(logging.ERROR, 'valgrind-fail-timeout',
                         {'timeout': timeout},
                         'TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out (reached {timeout} second limit)')
            elif exitcode != 0:
                status = 2  # turns the TBPL job red
                self.log(logging.ERROR, 'valgrind-fail-errors', {},
                         'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code from Valgrind')

        httpd.stop()

    return status