def unpack_target(self, unpacked_whls, unpack_dir):
    """Resolve the wheel matching `unpacked_whls`, extract it, and merge selected files into `unpack_dir`.

    :param unpacked_whls: target describing the requirements, module name, and unpack filters.
    :param str unpack_dir: destination directory the wheel's data files are merged into.
    :raises: self.WheelUnpackingError wrapping any failure during resolve/extract/copy.
    """
    compatible_interp = self._compatible_interpreter(unpacked_whls)
    with temporary_dir() as resolve_dir, temporary_dir() as extract_dir:
        try:
            dist = self._get_matching_wheel(
                resolve_dir,
                compatible_interp,
                unpacked_whls.all_imported_requirements,
                unpacked_whls.module_name)
            ZIP.extract(dist.location, extract_dir)
            if unpacked_whls.within_data_subdir:
                # Wheel data files live under the `<name>-<version>.data/<subdir>` prefix.
                data_prefix = '{name}-{version}.data/{subdir}'.format(
                    name=dist.project_name,
                    version=dist.version,
                    subdir=unpacked_whls.within_data_subdir,
                )
                source_dir = os.path.join(extract_dir, data_prefix)
            else:
                source_dir = extract_dir
            # Copy over the module's data files into `unpack_dir`.
            mergetree(source_dir, unpack_dir,
                      file_filter=self.get_unpack_filter(unpacked_whls))
        except Exception as e:
            raise self.WheelUnpackingError(
                "Error extracting wheel for target {}: {}"
                .format(unpacked_whls, str(e)), e)
def unpack_target(self, unpacked_whls, unpack_dir):
    """Locate the installed wheel chroot matching `unpacked_whls` and merge its files into `unpack_dir`.

    :param unpacked_whls: target describing the requirements, module name, and unpack filters.
    :param str unpack_dir: destination directory the wheel's data files are merged into.
    :raises: self.WheelUnpackingError wrapping any failure during resolve/copy.
    """
    compatible_interp = self._compatible_interpreter(unpacked_whls)
    with temporary_dir() as resolve_dir:
        try:
            dist = self._get_matching_wheel(
                resolve_dir,
                compatible_interp,
                unpacked_whls.all_imported_requirements,
                unpacked_whls.module_name,
            )
            wheel_chroot = dist.location
            # N.B.: Wheels with data dirs have the data installed under the top module.
            if unpacked_whls.within_data_subdir:
                source_dir = os.path.join(wheel_chroot, unpacked_whls.module_name)
            else:
                source_dir = wheel_chroot
            # Copy over the module's data files into `unpack_dir`.
            mergetree(source_dir, unpack_dir,
                      file_filter=self.get_unpack_filter(unpacked_whls))
        except Exception as e:
            raise self.WheelUnpackingError(
                "Error extracting wheel for target {}: {}".format(
                    unpacked_whls, str(e)), e)
def unpack_target(self, unpacked_whls, unpack_dir):
    """Resolve, extract, and merge the wheel for `unpacked_whls` into `unpack_dir`.

    :param unpacked_whls: target describing the requirements, module name, and unpack filters.
    :param str unpack_dir: destination directory the wheel's data files are merged into.
    :raises: self.WheelUnpackingError wrapping any failure during resolve/extract/copy.
    """
    interp = self._compatible_interpreter(unpacked_whls)
    with temporary_dir() as resolve_dir, temporary_dir() as extract_dir:
        try:
            dist = self._get_matching_wheel(
                resolve_dir, interp,
                unpacked_whls.all_imported_requirements,
                unpacked_whls.module_name)
            ZIP.extract(dist.location, extract_dir)
            # Default to the extraction root; narrow to the wheel's `.data` subdir if asked.
            dist_data_dir = extract_dir
            if unpacked_whls.within_data_subdir:
                dist_data_dir = os.path.join(
                    extract_dir,
                    '{name}-{version}.data/{subdir}'.format(
                        name=dist.project_name,
                        version=dist.version,
                        subdir=unpacked_whls.within_data_subdir,
                    ))
            unpack_filter = self.get_unpack_filter(unpacked_whls)
            # Copy over the module's data files into `unpack_dir`.
            mergetree(dist_data_dir, unpack_dir, file_filter=unpack_filter)
        except Exception as e:
            raise self.WheelUnpackingError(
                "Error extracting wheel for target {}: {}".format(
                    unpacked_whls, str(e)), e)
def execute_codegen(self, target, target_workdir):
    """
    Invoke the conan pex to fetch conan packages specified by a `ExternalNativeLibrary` target.

    :param ExternalNativeLibrary target: a target containing conan package specifications.
    :param str target_workdir: where to copy the installed package contents to.
    """
    conan = self.context.products.get_data(ConanPrep.tool_instance_cls)

    # TODO: we should really be able to download all of these in one go, and we should make an
    # upstream PR to allow that against Conan if not.
    for conan_requirement in target.packages:
        # See https://docs.conan.io/en/latest/reference/commands/consumer/install.html for
        # documentation on the 'install' command.
        argv = [
            'install',
            conan_requirement.pkg_spec,
            '--settings', 'os={}'.format(self._conan_os_name),
        ]
        for remote in self.get_options().conan_remotes:
            argv.extend(['--remote', remote])

        workunit_factory = functools.partial(
            self.context.new_workunit,
            name='install-conan-{}'.format(conan_requirement.pkg_spec),
            labels=[WorkUnitLabel.TOOL])

        # CONAN_USER_HOME is somewhat documented at
        # https://docs.conan.io/en/latest/mastering/sharing_settings_and_config.html.
        user_home = self._conan_user_home(conan)
        env = {
            'CONAN_USER_HOME': user_home,
        }
        with conan.run_with(workunit_factory, argv, env=env) as (cmdline, exit_code, workunit):
            if exit_code != 0:
                raise self.ConanFetchError(
                    'Error performing conan install with argv {} and environment {}: exited non-zero ({}).'
                    .format(cmdline, env, exit_code),
                    exit_code=exit_code)
            # Read the stdout from the read-write buffer, from the beginning of the output, and
            # convert to unicode.
            conan_install_stdout = workunit.output('stdout').read_from(0).decode()
            pkg_sha = conan_requirement.parse_conan_stdout_for_pkg_sha(conan_install_stdout)

        installed_data_dir = os.path.join(
            user_home, '.conan', 'data', conan_requirement.directory_path, 'package', pkg_sha)

        # Copy over the contents of the installed package into the target output directory. These
        # paths are currently hardcoded -- see `ExternalNativeLibrary`.
        mergetree(os.path.join(installed_data_dir, conan_requirement.include_relpath),
                  os.path.join(target_workdir, 'include'))
        mergetree(os.path.join(installed_data_dir, conan_requirement.lib_relpath),
                  os.path.join(target_workdir, 'lib'))
def report(self, output_dir, execution_failed_exception=None):
    """Generate scoverage html and xml reports from measurement data under `output_dir`.

    :param str output_dir: directory holding the measurement data dirs; reports are written to
        `<output_dir>/scoverage/reports/{html,xml}`.
    :param execution_failed_exception: if set, reporting is skipped unless the coverage-force
        flag is enabled.
    :returns: the html report's index path when `coverage_open` is set, else None.
    :raises TaskError: if the JVM report-generator process exits non-zero.
    """
    if execution_failed_exception:
        self._settings.log.warn(f'Test failed: {execution_failed_exception}')
        if self._coverage_force:
            # BUG FIX: the implicitly-concatenated literals previously lacked a separating
            # space, logging "...because thecoverage-force flag is set.".
            self._settings.log.warn(
                'Generating report even though tests failed, because the '
                'coverage-force flag is set.')
        else:
            return

    main = 'org.pantsbuild.scoverage.report.ScoverageReport'
    scoverage_cp = self._report_path
    html_report_path = os.path.join(output_dir, 'scoverage', 'reports', 'html')
    xml_report_path = os.path.join(output_dir, 'scoverage', 'reports', 'xml')
    safe_mkdir(html_report_path, clean=True)
    safe_mkdir(xml_report_path, clean=True)

    # Collect the measurement dirs of the targets that should appear in the report.
    final_target_dirs = []
    for parent_measurements_dir in self._iter_datadirs(output_dir):
        final_target_dirs += self.filter_scoverage_targets(parent_measurements_dir)

    args = [
        "--measurementsDirPath", f"{output_dir}",
        "--htmlDirPath", f"{html_report_path}",
        "--xmlDirPath", f"{xml_report_path}",
        "--targetFilters", f"{','.join(final_target_dirs)}",
    ]

    result = self._execute_java(
        classpath=scoverage_cp,
        main=main,
        jvm_options=self._settings.coverage_jvm_options,
        args=args,
        workunit_factory=self._context.new_workunit,
        workunit_name='scoverage-report-generator')

    if result != 0:
        raise TaskError(
            f"java {main} ... exited non-zero ({result}) - failed to scoverage-report-generator")

    self._settings.log.info(f"Scoverage html reports available at {html_report_path}")
    self._settings.log.info(f"Scoverage xml reports available at {xml_report_path}")

    if self._coverage_output_dir:
        self._settings.log.debug(f'Scoverage output also written to: {self._coverage_output_dir}!')
        mergetree(output_dir, self._coverage_output_dir)

    if self._settings.coverage_open:
        return os.path.join(html_report_path, 'index.html')
def test_mergetree_new(self):
    """Merging into a destination that does not yet exist creates it with the full source tree."""
    with self.tree() as (src, dst_root):
        target_dir = os.path.join(dst_root, 'dst')
        mergetree(src, target_dir)
        self.assert_tree(
            target_dir,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('a/b'),
            self.File('a/b/1', contents='1'),
            self.File.empty('a/b/2'),
            self.Dir('b'),
            self.File('b/1', contents='1'),
            self.File.empty('b/2'))
def test_mergetree_ignore_dirs(self):
    """An ignore callable can prune whole directories from the merge."""
    with self.tree() as (src, dst):
        def prune(root, names):
            # Only prune the 'b' subdir while walking 'a'; everything else merges fully.
            # NOTE(review): falls through to an implicit None otherwise -- presumably
            # mergetree treats a None return as "ignore nothing"; confirm.
            if root == os.path.join(src, 'a'):
                return ['b']

        mergetree(src, dst, ignore=prune)
        self.assert_tree(
            dst,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('b'),
            self.File('b/1', contents='1'),
            self.File.empty('b/2'))
def test_mergetree_ignore_dirs(self):
    """An ignore callable can prune whole directories from the merge."""
    with self.tree() as (src, dst):
        def prune(root, names):
            # Only prune the 'b' subdir while walking 'a'; everything else merges fully.
            # NOTE(review): falls through to an implicit None otherwise -- presumably
            # mergetree treats a None return as "ignore nothing"; confirm.
            if root == os.path.join(src, 'a'):
                return ['b']

        mergetree(src, dst, ignore=prune)
        self.assert_tree(
            dst,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('b'),
            self.File('b/1', contents=b'1'),
            self.File.empty('b/2'))
def test_mergetree_symlink(self):
    """With symlinks=True, symlinks in the source are recreated as links, not followed."""
    with self.tree() as (src, dst):
        mergetree(src, dst, symlinks=True)
        self.assert_tree(
            dst,
            self.Dir('a'),
            self.Symlink('a/2'),
            self.Dir('a/b'),
            self.File('a/b/1', contents=b'1'),
            self.File.empty('a/b/2'),
            # NB: assert_tree does not follow symlinks and so does not descend into the
            # symlinked b/ dir to find b/1 and b/2
            self.Symlink('b'))
def test_mergetree_symlink(self) -> None:
    """With symlinks=True, symlinks in the source are recreated as links, not followed."""
    with self.tree() as (src, dst):
        mergetree(src, dst, symlinks=True)
        self.assert_tree(
            dst,
            self.Dir("a"),
            self.Symlink("a/2"),
            self.Dir("a/b"),
            self.File("a/b/1", contents="1"),
            self.File.empty("a/b/2"),
            # NB: assert_tree does not follow symlinks and so does not descend into the
            # symlinked b/ dir to find b/1 and b/2
            self.Symlink("b"),
        )
def test_mergetree_new(self):
    """Merging into a destination that does not yet exist creates it with the full source tree."""
    with self.tree() as (src, dst_root):
        target_dir = os.path.join(dst_root, 'dst')
        mergetree(src, target_dir)
        self.assert_tree(
            target_dir,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('a/b'),
            self.File('a/b/1', contents=b'1'),
            self.File.empty('a/b/2'),
            self.Dir('b'),
            self.File('b/1', contents=b'1'),
            self.File.empty('b/2'))
def test_mergetree_ignore_dirs(self) -> None:
    """An ignore callable can prune whole directories from the merge."""
    with self.tree() as (src, dst):
        def prune(root, names):
            # Only prune the 'b' subdir while walking 'a'; everything else merges fully.
            # NOTE(review): falls through to an implicit None otherwise -- presumably
            # mergetree treats a None return as "ignore nothing"; confirm.
            if root == os.path.join(src, "a"):
                return ["b"]

        mergetree(src, dst, ignore=prune)
        self.assert_tree(
            dst,
            self.Dir("a"),
            self.File.empty("a/2"),
            self.Dir("b"),
            self.File("b/1", contents="1"),
            self.File.empty("b/2"),
        )
def test_mergetree_new(self) -> None:
    """Merging into a destination that does not yet exist creates it with the full source tree."""
    with self.tree() as (src, dst_root):
        target_dir = os.path.join(dst_root, "dst")
        mergetree(src, target_dir)
        self.assert_tree(
            target_dir,
            self.Dir("a"),
            self.File.empty("a/2"),
            self.Dir("a/b"),
            self.File("a/b/1", contents="1"),
            self.File.empty("a/b/2"),
            self.Dir("b"),
            self.File("b/1", contents="1"),
            self.File.empty("b/2"),
        )
def expose_results(self, invalid_tgts, partition, workdirs):
    """Copy junit xml and coverage results to their externally-visible locations.

    :param invalid_tgts: the targets pytest actually ran for; falsy means the whole
        partition's cached results are used.
    :param partition: the full partition of test targets.
    :param workdirs: accessor for this run's junitxml and coverage paths.
    """
    external_junit_xml_dir = self.get_options().junit_xml_dir
    if external_junit_xml_dir:
        # Either we just ran pytest for a set of invalid targets and generated a junit xml file
        # specific to that (sub)set or else we hit the cache for the whole partition and skipped
        # running pytest, simply retrieving the partition's full junit xml file.
        junitxml_path = workdirs.junitxml_path(*(invalid_tgts or partition))
        safe_mkdir(external_junit_xml_dir)
        shutil.copy2(junitxml_path, external_junit_xml_dir)
    if self.get_options().coverage:
        configured_dir = self.get_options().coverage_output_dir
        if configured_dir:
            target_dir = configured_dir
        else:
            # Default destination: <pants_distdir>/coverage/<partition-id>.
            pants_distdir = self.context.options.for_global_scope().pants_distdir
            target_dir = os.path.join(
                pants_distdir, 'coverage', Target.maybe_readable_identify(partition))
        mergetree(workdirs.coverage_path, target_dir)
def expose_results(self, invalid_tgts, partition, workdirs):
    """Copy junit xml and coverage results to their externally-visible locations.

    :param invalid_tgts: the targets pytest actually ran for; falsy means the whole
        partition's cached results are used.
    :param partition: the full partition of test targets.
    :param workdirs: accessor for this run's junitxml and coverage paths.
    """
    external_junit_xml_dir = self.get_options().junit_xml_dir
    if external_junit_xml_dir:
        # Either we just ran pytest for a set of invalid targets and generated a junit xml file
        # specific to that (sub)set or else we hit the cache for the whole partition and skipped
        # running pytest, simply retrieving the partition's full junit xml file.
        junitxml_path = workdirs.junitxml_path(*(invalid_tgts or partition))
        safe_mkdir(external_junit_xml_dir)
        shutil.copy2(junitxml_path, external_junit_xml_dir)
    if self.get_options().coverage:
        configured_dir = self.get_options().coverage_output_dir
        if configured_dir:
            target_dir = configured_dir
        else:
            # Default destination: <pants_distdir>/coverage/<partition-id>.
            pants_distdir = self.context.options.for_global_scope().pants_distdir
            target_dir = os.path.join(
                pants_distdir, 'coverage', Target.maybe_readable_identify(partition))
        mergetree(workdirs.coverage_path, target_dir)
def unpack_target(self, unpacked_whls, unpack_dir):
    """Build a requirements pex, find the matching wheel dir, and merge its files into `unpack_dir`.

    :param unpacked_whls: target describing the requirements, module name, and unpack filters.
    :param str unpack_dir: destination directory the wheel's data files are merged into.
    :raises: self.NativeCodeExtractionError wrapping any failure.
    """
    interpreter = self._compatible_interpreter(unpacked_whls)
    with temporary_dir() as tmp_dir:
        # NB: The pex needs to be in a subdirectory for some reason, and pants task caching
        # ensures it is the only member of this directory, so the dirname doesn't matter.
        pex_path = os.path.join(tmp_dir, 'xxx.pex')
        try:
            requirements_pex = self._generate_requirements_pex(
                pex_path, interpreter, unpacked_whls.all_imported_requirements)
            wheel_dir = self._get_wheel_dir(requirements_pex, unpacked_whls.module_name)
            matching_dir = self._get_matching_wheel_dir(wheel_dir, unpacked_whls.module_name)
            # Copy over the module's data files into `unpack_dir`.
            mergetree(matching_dir, unpack_dir,
                      file_filter=self.get_unpack_filter(unpacked_whls))
        except Exception as e:
            raise self.NativeCodeExtractionError(
                "Error extracting wheel for target {}: {}"
                .format(unpacked_whls, str(e)), e)
def _expose_results(self, invalid_tgts, workdirs):
    """Copy junit xml and coverage results to their externally-visible locations.

    :param invalid_tgts: the targets pytest actually ran for.
    :param workdirs: accessor for this run's junitxml and coverage paths.
    """
    external_junit_xml_dir = self.get_options().junit_xml_dir
    if external_junit_xml_dir:
        safe_mkdir(external_junit_xml_dir)
        junitxml_path = workdirs.junitxml_path(*invalid_tgts)
        if os.path.exists(junitxml_path):
            # Either we just ran pytest for a set of invalid targets and generated a junit xml
            # file specific to that (sub)set or else we hit the cache for the whole partition
            # and skipped running pytest, simply retrieving the partition's full junit xml file.
            shutil.copy2(junitxml_path, external_junit_xml_dir)
    if self.get_options().coverage:
        configured_dir = self.get_options().coverage_output_dir
        if configured_dir:
            target_dir = configured_dir
        else:
            # Default destination: <pants_distdir>/coverage/<target-set-id>.
            pants_distdir = self.context.options.for_global_scope().pants_distdir
            target_dir = os.path.join(pants_distdir, "coverage", workdirs.target_set_id())
        mergetree(workdirs.coverage_path, target_dir)
def test_mergetree_existing(self) -> None:
    """Merging overlays overlapping files and preserves non-overlapping destination files."""
    with self.tree() as (src, dst):
        # Existing empty files
        touch(os.path.join(dst, "c", "1"))
        touch(os.path.join(dst, "a", "b", "1"))

        mergetree(src, dst)

        self.assert_tree(
            dst,
            self.Dir("a"),
            self.File.empty("a/2"),
            self.Dir("a/b"),
            # Existing overlapping file should be overlayed.
            self.File("a/b/1", contents="1"),
            self.File.empty("a/b/2"),
            self.Dir("b"),
            self.File("b/1", contents="1"),
            self.File.empty("b/2"),
            self.Dir("c"),
            # Existing non-overlapping file should be preserved.
            self.File.empty("c/1"),
        )
def test_mergetree_existing(self):
    """Merging overlays overlapping files and preserves non-overlapping destination files."""
    with self.tree() as (src, dst):
        # Existing empty files
        touch(os.path.join(dst, 'c', '1'))
        touch(os.path.join(dst, 'a', 'b', '1'))

        mergetree(src, dst)

        self.assert_tree(
            dst,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('a/b'),
            # Existing overlapping file should be overlayed.
            self.File('a/b/1', contents='1'),
            self.File.empty('a/b/2'),
            self.Dir('b'),
            self.File('b/1', contents='1'),
            self.File.empty('b/2'),
            self.Dir('c'),
            # Existing non-overlapping file should be preserved.
            self.File.empty('c/1'))
def test_mergetree_existing(self):
    """Merging overlays overlapping files and preserves non-overlapping destination files."""
    with self.tree() as (src, dst):
        # Existing empty files
        touch(os.path.join(dst, 'c', '1'))
        touch(os.path.join(dst, 'a', 'b', '1'))

        mergetree(src, dst)

        self.assert_tree(
            dst,
            self.Dir('a'),
            self.File.empty('a/2'),
            self.Dir('a/b'),
            # Existing overlapping file should be overlayed.
            self.File('a/b/1', contents=b'1'),
            self.File.empty('a/b/2'),
            self.Dir('b'),
            self.File('b/1', contents=b'1'),
            self.File.empty('b/2'),
            self.Dir('c'),
            # Existing non-overlapping file should be preserved.
            self.File.empty('c/1'))
def test_mergetree_existing_file_mismatch(self):
    """A source directory colliding with an existing destination file raises ExistingFileError."""
    with self.tree() as (src, dst):
        # 'a' is a directory in src but a plain file in dst.
        touch(os.path.join(dst, 'a'))
        with self.assertRaises(ExistingFileError):
            mergetree(src, dst)
def test_mergetree_existing_dir_mismatch(self):
    """A source file colliding with an existing destination directory raises ExistingDirError."""
    with self.tree() as (src, dst):
        # 'b/1' is a file in src but a directory in dst.
        os.makedirs(os.path.join(dst, 'b', '1'))
        with self.assertRaises(ExistingDirError):
            mergetree(src, dst)
def test_mergetree_existing_file_mismatch(self):
    """A source directory colliding with an existing destination file raises ExistingFileError."""
    with self.tree() as (src, dst):
        # 'a' is a directory in src but a plain file in dst.
        touch(os.path.join(dst, 'a'))
        with self.assertRaises(ExistingFileError):
            mergetree(src, dst)
def test_mergetree_existing_dir_mismatch(self):
    """A source file colliding with an existing destination directory raises ExistingDirError."""
    with self.tree() as (src, dst):
        # 'b/1' is a file in src but a directory in dst.
        os.makedirs(os.path.join(dst, 'b', '1'))
        with self.assertRaises(ExistingDirError):
            mergetree(src, dst)