def bundle_and_run(self, target, bundle_name, args=None):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .jar extension)
  :param args: optional arguments to pass to executable
  :return: stdout as a string on success, raises an Exception on error
  """
  pants_run = self.run_pants(['bundle.jvm', '--archive=zip', target])
  self.assert_success(pants_run)

  # TODO(John Sirois): We need a zip here to suck in external library classpath elements
  # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
  # contextmanager that yields its results while the tmpdir workdir is still active and change
  # this test back to using an un-archived bundle.
  with temporary_dir() as workdir:
    ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
    optional_args = args if args else []
    java_run = subprocess.Popen(['java', '-jar',
                                 '{bundle_name}.jar'.format(bundle_name=bundle_name)] + optional_args,
                                stdout=subprocess.PIPE,
                                cwd=workdir)
    stdout, _ = java_run.communicate()
    # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
    self.assertEqual(java_run.returncode, 0)
    return stdout
def _exec_bundle(self, target, bundle_name):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .jar extension)
  :return: stdout as a string on success, raises an Exception on error
  """
  pants_run = self.run_pants(['goal', 'bundle', '--bundle-archive=zip', target])
  # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
  self.assertEqual(pants_run.returncode, self.PANTS_SUCCESS_CODE,
                   "goal bundle expected success, got {0}\n"
                   "got stderr:\n{1}\n"
                   "got stdout:\n{2}\n".format(pants_run.returncode,
                                               pants_run.stderr_data,
                                               pants_run.stdout_data))

  # TODO(John Sirois): We need a zip here to suck in external library classpath elements
  # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
  # contextmanager that yields its results while the tmpdir workdir is still active and change
  # this test back to using an un-archived bundle.
  with temporary_dir() as workdir:
    ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
    java_run = subprocess.Popen(['java', '-jar',
                                 '{bundle_name}.jar'.format(bundle_name=bundle_name)],
                                stdout=subprocess.PIPE,
                                cwd=workdir)
    stdout, _ = java_run.communicate()
    self.assertEqual(java_run.returncode, 0)
    return stdout
def _exec_bundle(self, target, bundle_name):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .jar extension)
  :return: stdout as a string on success, raises an Exception on error
  """
  pants_run = self.run_pants(['goal', 'bundle', '--bundle-archive=zip', target])
  # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
  self.assertEqual(pants_run.returncode, self.PANTS_SUCCESS_CODE,
                   "goal bundle expected success, got {0}\n"
                   "got stderr:\n{1}\n"
                   "got stdout:\n{2}\n".format(pants_run.returncode,
                                               pants_run.stderr_data,
                                               pants_run.stdout_data))

  # TODO(John Sirois): We need a zip here to suck in external library classpath elements
  # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
  # contextmanager that yields its results while the tmpdir workdir is still active and change
  # this test back to using an un-archived bundle.
  with temporary_dir() as workdir:
    ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
    java_run = subprocess.Popen(['java', '-jar',
                                 '{bundle_name}.jar'.format(bundle_name=bundle_name)],
                                stdout=subprocess.PIPE,
                                cwd=workdir)
    stdout, _ = java_run.communicate()
    self.assertEqual(java_run.returncode, 0)
    return stdout
def _unpack(self, unpacked_jars):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedJars unpacked_jars: target referencing jar_libraries to unpack.
  """
  unpack_dir = self._unpack_dir(unpacked_jars)
  # Start from a clean directory.  After rmtree the path never exists, so the original's
  # second existence check before makedirs was redundant.
  if os.path.exists(unpack_dir):
    shutil.rmtree(unpack_dir)
  os.makedirs(unpack_dir)

  include_patterns = self._compile_patterns(unpacked_jars.include_patterns,
                                            field_name='include_patterns',
                                            spec=unpacked_jars.address.spec)
  exclude_patterns = self._compile_patterns(unpacked_jars.exclude_patterns,
                                            field_name='exclude_patterns',
                                            spec=unpacked_jars.address.spec)

  # A named function instead of an assigned lambda (PEP 8 / E731).
  def unpack_filter(f):
    return self._unpack_filter(f, include_patterns, exclude_patterns)

  products = self.context.products.get('ivy_imports')
  jarmap = products[unpacked_jars]
  for path, names in jarmap.items():
    for name in names:
      ZIP.extract(os.path.join(path, name), unpack_dir, filter_func=unpack_filter)
def bundle_and_run(self, target, bundle_name, args=None):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .jar extension)
  :param args: optional arguments to pass to executable
  :return: stdout as a string on success, raises an Exception on error
  """
  pants_run = self.run_pants(['bundle', '--archive=zip', target])
  self.assert_success(pants_run)

  # TODO(John Sirois): We need a zip here to suck in external library classpath elements
  # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
  # contextmanager that yields its results while the tmpdir workdir is still active and change
  # this test back to using an un-archived bundle.
  with temporary_dir() as workdir:
    ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
    optional_args = args if args else []
    java_run = subprocess.Popen(['java', '-jar',
                                 '{bundle_name}.jar'.format(bundle_name=bundle_name)] + optional_args,
                                stdout=subprocess.PIPE,
                                cwd=workdir)
    stdout, _ = java_run.communicate()
    # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
    self.assertEqual(java_run.returncode, 0)
    return stdout
def _unpack(self, unpacked_jars):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedJars unpacked_jars: target referencing jar_libraries to unpack.
  """
  unpack_dir = self._unpack_dir(unpacked_jars)
  # Start from a clean directory.  After rmtree the path never exists, so the original's
  # second existence check before makedirs was redundant.
  if os.path.exists(unpack_dir):
    shutil.rmtree(unpack_dir)
  os.makedirs(unpack_dir)

  include_patterns = self._compile_patterns(unpacked_jars.include_patterns,
                                            field_name='include_patterns',
                                            spec=unpacked_jars.address.spec)
  exclude_patterns = self._compile_patterns(unpacked_jars.exclude_patterns,
                                            field_name='exclude_patterns',
                                            spec=unpacked_jars.address.spec)

  # A named function instead of an assigned lambda (PEP 8 / E731).
  def unpack_filter(f):
    return self._unpack_filter(f, include_patterns, exclude_patterns)

  products = self.context.products.get('ivy_imports')
  jarmap = products[unpacked_jars]
  for path, names in jarmap.items():
    for name in names:
      ZIP.extract(os.path.join(path, name), unpack_dir, filter_func=unpack_filter)
def _unpack_artifacts(self, imports):
  """Unpack the .aar and .jar artifacts listed in `imports`.

  `imports` maps a directory path to the archive file names under it (see the inner loop).
  .jar entries are unpacked directly; .aar entries are unpacked first and then their inner
  classes.jar is unpacked as well.  `self._unpacked_archives` records what has already been
  unpacked so repeated entries are skipped.
  """
  # Unpack the aar and jar library artifacts. If the aar files have a jar in the contents,
  # unpack that jar as well.
  for archive_path in imports:
    for archive in imports[archive_path]:
      jar_outdir = self.unpacked_jar_location(archive)
      if archive.endswith('.jar'):
        jar_file = os.path.join(archive_path, archive)
      elif archive.endswith('.aar'):
        unpacked_aar_destination = self.unpacked_aar_location(archive)
        jar_file = os.path.join(unpacked_aar_destination, 'classes.jar')
        # Unpack .aar files.
        if archive not in self._unpacked_archives:
          ZIP.extract(os.path.join(archive_path, archive), unpacked_aar_destination)
          self._unpacked_archives.update([archive])
        # Create an .aar/classes.jar signature for self._unpacked_archives.
        # Rebinding `archive` here makes the jar-unpacking check below track the inner
        # classes.jar rather than the containing .aar.
        archive = os.path.join(archive, 'classes.jar')
      else:
        raise self.UnexpectedArchiveType('Android dependencies can be .aar or .jar '
                                         'archives (was: {})'.format(archive))
      # Unpack the jar files.
      if archive not in self._unpacked_archives and os.path.isfile(jar_file):
        ZIP.extract(jar_file, jar_outdir)
        self._unpacked_archives.update([archive])
def unpack_target(self, unpacked_whls, unpack_dir):
  """Resolve the matching wheel for `unpacked_whls`, extract it, and copy its files into `unpack_dir`."""
  interpreter = self._compatible_interpreter(unpacked_whls)

  with temporary_dir() as resolve_dir, temporary_dir() as extract_dir:
    try:
      matched_dist = self._get_matching_wheel(resolve_dir, interpreter,
                                              unpacked_whls.all_imported_requirements,
                                              unpacked_whls.module_name)
      ZIP.extract(matched_dist.location, extract_dir)

      # Data files may live under a versioned `.data/<subdir>` prefix inside the wheel.
      if unpacked_whls.within_data_subdir:
        prefix = '{name}-{version}.data/{subdir}'.format(
          name=matched_dist.project_name,
          version=matched_dist.version,
          subdir=unpacked_whls.within_data_subdir,
        )
        dist_data_dir = os.path.join(extract_dir, prefix)
      else:
        dist_data_dir = extract_dir

      # Copy over the module's data files into `unpack_dir`.
      mergetree(dist_data_dir, unpack_dir, file_filter=self.get_unpack_filter(unpacked_whls))
    except Exception as e:
      raise self.WheelUnpackingError(
        "Error extracting wheel for target {}: {}".format(unpacked_whls, str(e)), e)
def unpack_target(self, unpacked_whls, unpack_dir):
  """Resolve the matching wheel for `unpacked_whls`, extract it, and copy its files into `unpack_dir`."""
  interpreter = self._compatible_interpreter(unpacked_whls)

  with temporary_dir() as resolve_dir, temporary_dir() as extract_dir:
    try:
      matched_dist = self._get_matching_wheel(resolve_dir, interpreter,
                                              unpacked_whls.all_imported_requirements,
                                              unpacked_whls.module_name)
      ZIP.extract(matched_dist.location, extract_dir)

      # Data files may live under a versioned `.data/<subdir>` prefix inside the wheel.
      dist_data_dir = extract_dir
      if unpacked_whls.within_data_subdir:
        prefix = '{name}-{version}.data/{subdir}'.format(
          name=matched_dist.project_name,
          version=matched_dist.version,
          subdir=unpacked_whls.within_data_subdir,
        )
        dist_data_dir = os.path.join(extract_dir, prefix)

      # Copy over the module's data files into `unpack_dir`.
      mergetree(dist_data_dir, unpack_dir, file_filter=self.get_unpack_filter(unpacked_whls))
    except Exception as e:
      raise self.WheelUnpackingError(
        "Error extracting wheel for target {}: {}".format(unpacked_whls, str(e)), e)
def _unpack_artifacts(self, jar_imports):
  """Unpack the .aar and .jar artifacts in `jar_imports`.

  `jar_imports` yields (coordinate, path) pairs.  Jars are unpacked directly; .aar archives
  are unpacked first and then their inner classes.jar is unpacked as well.
  `self._unpacked_archives` records what has already been unpacked so duplicates are skipped.
  """
  # Unpack the aar and jar library artifacts. If the aar files have a jar in the contents,
  # unpack that jar as well.
  for coordinate, aar_or_jar in jar_imports:
    jar_outdir = self.unpacked_jar_location(coordinate)
    if 'jar' == coordinate.ext:
      jar_file = aar_or_jar
    elif 'aar' == coordinate.ext:
      unpacked_aar_destination = self.unpacked_aar_location(coordinate)
      jar_file = os.path.join(unpacked_aar_destination, 'classes.jar')
      # Unpack .aar files.
      if coordinate not in self._unpacked_archives:
        ZIP.extract(aar_or_jar, unpacked_aar_destination)
        self._unpacked_archives.add(aar_or_jar)
      # Create an .aar/classes.jar signature for self._unpacked_archives.
      # Rebinding `coordinate` makes the jar-unpacking check below track the inner
      # classes.jar rather than the containing .aar.
      coordinate = M2Coordinate(org=coordinate.org, name=coordinate.name, rev=coordinate.rev,
                                classifier=coordinate.classifier, ext='classes.jar')
    else:
      raise self.UnexpectedArchiveType('Android dependencies can be .aar or .jar archives '
                                       '(was: {} at {})'.format(coordinate, aar_or_jar))
    # Unpack the jar files.
    if coordinate not in self._unpacked_archives and os.path.isfile(jar_file):
      ZIP.extract(jar_file, jar_outdir)
      self._unpacked_archives.add(aar_or_jar)
def unpack_target(self, unpacked_jars, unpack_dir):
  """Unpack the resolved jars imported by `unpacked_jars` into `unpack_dir` (deprecated goal)."""
  deprecated_conditional(
    lambda: True,
    removal_version="1.31.0.dev0",
    entity_description="The `unpack-jars` goal",
    hint_message="Contact the Pants team on Slack or [email protected] "
                 "if you need this functionality.",
  )
  direct_coords = {jar.coordinate for jar in unpacked_jars.all_imported_jar_deps}
  unpack_filter = self.get_unpack_filter(unpacked_jars)
  jar_import_products = self.context.products.get_data(JarImportProducts)
  for coordinate, jar_path in jar_import_products.imports(unpacked_jars):
    # Intransitive targets only unpack jars they directly import.
    if unpacked_jars.payload.intransitive and coordinate not in direct_coords:
      continue
    self.context.log.info(
      "Unpacking jar {coordinate} from {jar_path} to {unpack_dir}.".format(
        coordinate=coordinate, jar_path=jar_path, unpack_dir=unpack_dir))
    ZIP.extract(jar_path, unpack_dir, filter_func=unpack_filter)
def _unpack_artifacts(self, imports):
  """Unpack the .aar and .jar artifacts listed in `imports`.

  `imports` maps a directory path to the archive file names under it (see the inner loop).
  .jar entries are unpacked directly; .aar entries are unpacked first and then their inner
  classes.jar is unpacked as well.  `self._unpacked_archives` records what has already been
  unpacked so repeated entries are skipped.
  """
  # Unpack the aar and jar library artifacts. If the aar files have a jar in the contents,
  # unpack that jar as well.
  for archive_path in imports:
    for archive in imports[archive_path]:
      jar_outdir = self.unpacked_jar_location(archive)
      if archive.endswith('.jar'):
        jar_file = os.path.join(archive_path, archive)
      elif archive.endswith('.aar'):
        unpacked_aar_destination = self.unpacked_aar_location(archive)
        jar_file = os.path.join(unpacked_aar_destination, 'classes.jar')
        # Unpack .aar files.
        if archive not in self._unpacked_archives:
          ZIP.extract(os.path.join(archive_path, archive), unpacked_aar_destination)
          self._unpacked_archives.update([archive])
        # Create an .aar/classes.jar signature for self._unpacked_archives.
        # Rebinding `archive` here makes the jar-unpacking check below track the inner
        # classes.jar rather than the containing .aar.
        archive = os.path.join(archive, 'classes.jar')
      else:
        raise self.UnexpectedArchiveType('Android dependencies can be .aar or .jar '
                                         'archives (was: {})'.format(archive))
      # Unpack the jar files.
      if archive not in self._unpacked_archives and os.path.isfile(jar_file):
        ZIP.extract(jar_file, jar_outdir)
        self._unpacked_archives.update([archive])
def ensure_classfiles(target_name, classfiles):
  """Assert the single cache entry for `target_name` holds rsc/m.jar and zinc/z.jar with `classfiles`."""
  cache_test_subdir = cache_test_subdirs[target_name]
  cache_dir_entries = os.listdir(cache_test_subdir)
  self.assertEqual(len(cache_dir_entries), 1)
  cache_path = os.path.join(cache_test_subdir, cache_dir_entries[0])
  with self.temporary_workdir() as cache_unzip_dir, \
       self.temporary_workdir() as rsc_dir, \
       self.temporary_workdir() as zinc_dir:
    TGZ.extract(cache_path, cache_unzip_dir)
    # assert that the unzip dir has the directory structure
    # ./compile/rsc/{hash}/{x}.{target_name}/{hash2}
    path = descend_subdirs(cache_unzip_dir, ["compile", "rsc", None, None])
    self.assertTrue(path.endswith(f".{target_name}"))
    path = take_only_subdir(path)
    # TODO: Surprisingly, rsc/m.jar is created even for dependee-less targets.
    self.assertEqual(sorted(os.listdir(path)), ["rsc", "zinc"])

    # Check that zinc/z.jar and rsc/m.jar both exist
    # and that their contents contain the right classfiles.
    zjar = os.path.join(path, "zinc", "z.jar")
    self.assertTrue(os.path.exists(zjar))
    ZIP.extract(zjar, zinc_dir)
    self.assertEqual(sorted(os.listdir(zinc_dir)),
                     sorted(["compile_classpath", *classfiles]))

    mjar = os.path.join(path, "rsc", "m.jar")
    self.assertTrue(os.path.exists(mjar))
    ZIP.extract(mjar, rsc_dir)
    self.assertEqual(sorted(os.listdir(rsc_dir)), sorted(classfiles))
def _unpack(self, unpacked_archives):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedArchives unpacked_archives: target referencing jar_libraries to unpack.
  """
  self.context.log.info('Unpacking {}'.format(unpacked_archives.address.spec))
  unpack_dir = unpacked_archives.destination
  safe_mkdir(unpack_dir, clean=True)
  unpack_filter = self.get_unpack_filter(unpacked_archives)
  classpath_products = ClasspathProducts(self.get_options().pants_workdir)
  resolve_hashes = self.resolve(None, unpacked_archives.dependencies, classpath_products)
  ivy_cache_dir = os.path.expanduser(IvySubsystem.global_instance().get_options().cache_dir)

  # Normalize a jar (or module ref) to an M2 coordinate so library jars and ivy report
  # module refs can be compared in one set.
  def to_m2(jar):
    return M2Coordinate(org=jar.org, name=jar.name, rev=jar.rev,
                        classifier=jar.classifier, ext=jar.ext)

  # Collect the coordinates of every jar declared by JarLibrary targets reachable from
  # this target.
  libraries = self.context.build_graph.transitive_subgraph_of_addresses([unpacked_archives.address])
  libraries = [t for t in libraries if isinstance(t, JarLibrary)]
  coords = set()
  for library in libraries:
    coords.update(to_m2(jar) for jar in library.payload.jars)

  for resolve_hash in resolve_hashes:
    path = IvyUtils.xml_report_path(ivy_cache_dir, resolve_hash, 'default')
    info = IvyUtils.parse_xml_report('default', path)
    # Seed with refs matching our libraries' coordinates...
    refs_for_libraries = set()
    for ref in info.modules_by_ref.keys():
      if to_m2(ref) in coords:
        refs_for_libraries.add(ref)
    # ...then expand to their transitive dependencies in the ivy report.
    memo = {}
    for ref in tuple(refs_for_libraries):
      info.traverse_dependency_graph(ref, refs_for_libraries.add, memo)
    # Sorted for deterministic extraction order.
    for ref in sorted(refs_for_libraries):
      module = info.modules_by_ref[ref]
      artifact_path = module.artifact
      self.context.log.debug('Extracting {} to {}.'.format(to_m2(ref), unpack_dir))
      if artifact_path.endswith('.zip') or artifact_path.endswith('.jar'):
        ZIP.extract(artifact_path, unpack_dir, filter_func=unpack_filter)
      else:
        self._extract_tar(artifact_path, unpack_dir, filter_func=unpack_filter)
def _extract_jar(self, jar_path):
  """Extracts the jar to a subfolder of workdir/extracted and returns the path to it."""
  # Key the output directory on the jar's content hash so identical jars share one extraction.
  with open(jar_path, 'rb') as f:
    fingerprint = sha1(f.read()).hexdigest()
  outdir = os.path.join(self.workdir, 'extracted', fingerprint)
  if not os.path.exists(outdir):
    ZIP.extract(jar_path, outdir)
    self.context.log.debug('Extracting jar at {jar_path}.'.format(jar_path=jar_path))
  return outdir
def bundle_and_run(self, target, bundle_name, bundle_jar_name=None, bundle_options=None,
                   args=None,
                   expected_bundle_jar_content=None,
                   expected_bundle_content=None,
                   library_jars_are_symlinks=True):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .zip extension)
  :param bundle_jar_name: monolithic jar filename (minus .jar extension), if None will be the
    same as bundle_name
  :param bundle_options: additional options for bundle
  :param args: optional arguments to pass to executable
  :param expected_bundle_content: verify the bundle zip content
  :param expected_bundle_jar_content: verify the bundle jar content
  :param library_jars_are_symlinks: verify library jars are symlinks if True, and actual
    files if False. Default `True` because we always create symlinks for both external and internal
    dependencies, only exception is when shading is used.
  :return: stdout as a string on success, raises an Exception on error
  """
  bundle_jar_name = bundle_jar_name or bundle_name
  full_options = ['bundle.jvm'] + (bundle_options or []) + ['--archive=zip', target]
  with self.pants_results(full_options) as pants_run:
    self.assert_success(pants_run)
    self.assertTrue(check_symlinks('dist/{bundle_name}-bundle/libs'.format(bundle_name=bundle_name),
                                   library_jars_are_symlinks))
    # TODO(John Sirois): We need a zip here to suck in external library classpath elements
    # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
    # contextmanager that yields its results while the tmpdir workdir is still active and change
    # this test back to using an un-archived bundle.
    with temporary_dir() as workdir:
      ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
      if expected_bundle_content:
        self.assertTrue(contains_exact_files(workdir, expected_bundle_content))
      if expected_bundle_jar_content:
        with temporary_dir() as check_bundle_jar_dir:
          bundle_jar = os.path.join(workdir,
                                    '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name))
          ZIP.extract(bundle_jar, check_bundle_jar_dir)
          self.assertTrue(contains_exact_files(check_bundle_jar_dir, expected_bundle_jar_content))

      java_run = subprocess.Popen(['java', '-jar',
                                   '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name)]
                                  + (args or []),
                                  stdout=subprocess.PIPE,
                                  cwd=workdir)
      stdout, _ = java_run.communicate()
      self.assertEqual(java_run.returncode, 0)
      return stdout.decode('utf-8')
def test_shader_project(self):
    """Test that the binary target at the ``shading_project`` can be built and run.

    Explicitly checks that the classes end up with the correct shaded fully qualified
    classnames.
    """
    shading_project = "testprojects/src/java/org/pantsbuild/testproject/shading"
    self.assert_success(self.run_pants(["clean-all"]))
    self.assert_success(self.run_pants(["binary", shading_project]))

    expected_classes = {
        # Explicitly excluded by a shading_exclude() rule.
        "org/pantsbuild/testproject/shadingdep/PleaseDoNotShadeMe.class",
        # Not matched by any rule, so stays the same.
        "org/pantsbuild/testproject/shading/Main.class",
        # Shaded with the target_id prefix, along with the default pants prefix.
        ("__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/otherpackage/"
         "ShadeWithTargetId.class"),
        # Also shaded with the target_id prefix and default pants prefix, but for a different
        # target (so the target_id is different).
        "__shaded_by_pants__/org/pantsbuild/testproject/shading/ShadeSelf.class",
        # All these are shaded by the same shading_relocate_package(), which is recursive by
        # default.
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/subpackage/Subpackaged.class",
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/SomeClass.class",
        "__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/Dependency.class",
        # Shaded by a shading_relocate() that completely renames the package and class name.
        "org/pantsbuild/testproject/foo/bar/MyNameIsDifferentNow.class",
    }

    jar_path = os.path.join("dist", "shading.jar")
    init_subsystem(DistributionLocator)
    execute_java = DistributionLocator.cached(minimum_version="1.6").execute_java
    # Both the unshaded and the fully-renamed entry points must run from the built jar.
    for main_class in ("org.pantsbuild.testproject.shading.Main",
                       "org.pantsbuild.testproject.foo.bar.MyNameIsDifferentNow"):
        self.assertEqual(0, execute_java(classpath=[jar_path], main=main_class))

    received_classes = set()
    with temporary_dir() as tempdir:
        ZIP.extract(jar_path, tempdir, filter_func=lambda f: f.endswith(".class"))
        for root, _, files in os.walk(tempdir):
            received_classes.update(
                os.path.relpath(os.path.join(root, name), tempdir) for name in files)

    self.assertEqual(expected_classes, received_classes)
def bundle_and_run(self, target, bundle_name, bundle_jar_name=None, bundle_options=None,
                   args=None,
                   expected_bundle_jar_content=None,
                   expected_bundle_content=None,
                   library_jars_are_symlinks=True):
  """Creates the bundle with pants, then does java -jar {bundle_name}.jar to execute the bundle.

  :param target: target name to compile
  :param bundle_name: resulting bundle filename (minus .zip extension)
  :param bundle_jar_name: monolithic jar filename (minus .jar extension), if None will be the
    same as bundle_name
  :param bundle_options: additional options for bundle
  :param args: optional arguments to pass to executable
  :param expected_bundle_content: verify the bundle zip content
  :param expected_bundle_jar_content: verify the bundle jar content
  :param library_jars_are_symlinks: verify library jars are symlinks if True, and actual
    files if False. Default `True` because we always create symlinks for both external and internal
    dependencies, only exception is when shading is used.
  :return: stdout as a string on success, raises an Exception on error
  """
  bundle_jar_name = bundle_jar_name or bundle_name
  bundle_options = bundle_options or []
  bundle_options = ['bundle.jvm'] + bundle_options + ['--archive=zip', target]
  with self.pants_results(bundle_options) as pants_run:
    self.assert_success(pants_run)
    self.assertTrue(check_symlinks('dist/{bundle_name}-bundle/libs'.format(bundle_name=bundle_name),
                                   library_jars_are_symlinks))
    # TODO(John Sirois): We need a zip here to suck in external library classpath elements
    # pointed to by symlinks in the run_pants ephemeral tmpdir.  Switch run_pants to be a
    # contextmanager that yields its results while the tmpdir workdir is still active and change
    # this test back to using an un-archived bundle.
    with temporary_dir() as workdir:
      ZIP.extract('dist/{bundle_name}.zip'.format(bundle_name=bundle_name), workdir)
      if expected_bundle_content:
        self.assertTrue(contains_exact_files(workdir, expected_bundle_content))
      if expected_bundle_jar_content:
        with temporary_dir() as check_bundle_jar_dir:
          bundle_jar = os.path.join(workdir,
                                    '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name))
          ZIP.extract(bundle_jar, check_bundle_jar_dir)
          self.assertTrue(contains_exact_files(check_bundle_jar_dir, expected_bundle_jar_content))

      optional_args = []
      if args:
        optional_args = args
      java_run = subprocess.Popen(['java', '-jar',
                                   '{bundle_jar_name}.jar'.format(bundle_jar_name=bundle_jar_name)]
                                  + optional_args,
                                  stdout=subprocess.PIPE,
                                  cwd=workdir)
      stdout, _ = java_run.communicate()
      # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
      self.assertEqual(java_run.returncode, 0)
      return stdout
def _extract_jar(self, jar_path):
  """Extracts the jar to a subfolder of workdir/extracted and returns the path to it."""
  # Key the output directory on the jar's content hash so identical jars share one extraction.
  with open(jar_path, 'rb') as f:
    fingerprint = sha1(f.read()).hexdigest()
  outdir = os.path.join(self.workdir, 'extracted', fingerprint)
  if os.path.exists(outdir):
    self.context.log.debug('Jar already extracted at {jar_path}.'.format(jar_path=jar_path))
  else:
    ZIP.extract(jar_path, outdir)
    self.context.log.debug('Extracting jar at {jar_path}.'.format(jar_path=jar_path))
  return outdir
def unpack_target(self, unpacked_jars, unpack_dir):
  """Unpack the resolved jars imported by `unpacked_jars` into `unpack_dir`."""
  direct_coords = {jar.coordinate for jar in unpacked_jars.all_imported_jar_deps}
  unpack_filter = self.get_unpack_filter(unpacked_jars)
  jar_import_products = self.context.products.get_data(JarImportProducts)
  for coordinate, jar_path in jar_import_products.imports(unpacked_jars):
    # Intransitive targets only unpack jars they directly import.
    if unpacked_jars.payload.intransitive and coordinate not in direct_coords:
      continue
    self.context.log.info('Unpacking jar {coordinate} from {jar_path} to {unpack_dir}.'.format(
      coordinate=coordinate, jar_path=jar_path, unpack_dir=unpack_dir))
    ZIP.extract(jar_path, unpack_dir, filter_func=unpack_filter)
def _get_jar_class_versions(self, jarname):
  """Extract the .class files from dist/<jarname> and map each relative path to its class-file version."""
  jar_path = os.path.join('dist', jarname)
  self.assertTrue(os.path.exists(jar_path), '{} does not exist.'.format(jar_path))

  class_to_version = {}
  with temporary_dir() as tempdir:
    ZIP.extract(jar_path, tempdir, filter_func=lambda f: f.endswith('.class'))
    for root, _, files in os.walk(tempdir):
      for name in files:
        class_path = os.path.abspath(os.path.join(root, name))
        class_to_version[os.path.relpath(class_path, tempdir)] = self.determine_version(class_path)
  return class_to_version
def _extract_jar(self, coordinate, jar_path):
    """Extracts the jar to a subfolder of workdir/extracted and returns the path to it."""
    # Key the output directory on the jar's content hash so identical jars share one extraction.
    with open(jar_path, "rb") as f:
        fingerprint = sha1(f.read()).hexdigest()
    outdir = os.path.join(self.workdir, "extracted", fingerprint)
    if os.path.exists(outdir):
        self.context.log.debug(
            "Jar {jar} already extracted at {jar_path}.".format(jar=coordinate, jar_path=jar_path)
        )
    else:
        ZIP.extract(jar_path, outdir)
        self.context.log.debug(
            "Extracting jar {jar} at {jar_path}.".format(jar=coordinate, jar_path=jar_path)
        )
    return outdir
def test_zip_filter(self):
  """Extracting with a filter should only unpack the entries the filter accepts."""
  def do_filter(path):
    return path == 'allowed.txt'

  with temporary_dir() as fromdir:
    touch(os.path.join(fromdir, 'allowed.txt'))
    touch(os.path.join(fromdir, 'disallowed.txt'))
    with temporary_dir() as archivedir:
      archive = ZIP.create(fromdir, archivedir, 'archive')
      with temporary_dir() as todir:
        # ZIP.extract takes `filter_func` (as every other call site in this file uses);
        # `filter` is not the keyword and also shadows the builtin.
        ZIP.extract(archive, todir, filter_func=do_filter)
        # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
        self.assertEqual({'allowed.txt'}, self._listtree(todir, empty_dirs=False))
def test_zip_filter(self):
  """Extracting with a filter should only unpack the entries the filter accepts."""
  def do_filter(path):
    return path == 'allowed.txt'

  with temporary_dir() as fromdir:
    touch(os.path.join(fromdir, 'allowed.txt'))
    touch(os.path.join(fromdir, 'disallowed.txt'))
    with temporary_dir() as archivedir:
      archive = ZIP.create(fromdir, archivedir, 'archive')
      with temporary_dir() as todir:
        ZIP.extract(archive, todir, filter_func=do_filter)
        # assertEquals is a deprecated alias of assertEqual (removed in Python 3.12).
        self.assertEqual({'allowed.txt'}, self._listtree(todir, empty_dirs=False))
def _dump(self, jar_path, jar_file):
  """Copy every entry of the jar at `jar_path` (except its manifest) into the open `jar_file`.

  :param jar_path: path of the source jar whose entries are dumped.
  :param jar_file: an open, writable zip/jar object receiving the entries.
  :raises TaskError: if the source jar is corrupt or empty.
  """
  self.context.log.debug(' dumping %s' % jar_path)
  with temporary_dir() as tmpdir:
    try:
      ZIP.extract(jar_path, tmpdir)
    except zipfile.BadZipfile:
      raise TaskError('Bad JAR file, maybe empty: %s' % jar_path)
    for root, dirs, files in os.walk(tmpdir):
      for f in files:
        path = os.path.join(root, f)
        # NOTE(review): .decode('utf-8') implies Python 2 byte-string paths here; on
        # Python 3 `str` has no decode() and this would raise — confirm interpreter version.
        relpath = os.path.relpath(path, tmpdir).decode('utf-8')
        # Skip the source jar's manifest; the destination jar manages its own.
        if Manifest.PATH != relpath:
          jar_file.write(path, relpath)
def _extract_jar(self, coordinate, jar_path):
    """Extracts the jar to a subfolder of workdir/extracted and returns the path to it."""
    # Key the output directory on the jar's content hash so identical jars share one extraction.
    with open(jar_path, "rb") as f:
        digest = sha1(f.read()).hexdigest()
    outdir = os.path.join(self.workdir, "extracted", digest)
    if os.path.exists(outdir):
        self.context.log.debug(
            "Jar {jar} already extracted at {jar_path}.".format(
                jar=coordinate, jar_path=jar_path))
    else:
        ZIP.extract(jar_path, outdir)
        self.context.log.debug(
            "Extracting jar {jar} at {jar_path}.".format(
                jar=coordinate, jar_path=jar_path))
    return outdir
def _unpack(self, unpacked_archives):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedArchives unpacked_archives: target referencing jar_libraries to unpack.
  """
  self.context.log.info('Unpacking {}'.format(unpacked_archives.address.spec))
  unpack_dir = unpacked_archives.destination
  safe_mkdir(unpack_dir, clean=True)
  unpack_filter = self.get_unpack_filter(unpacked_archives)
  classpath_products = ClasspathProducts(self.get_options().pants_workdir)
  resolve_hashes = self.resolve(None, unpacked_archives.dependencies, classpath_products)
  ivy_cache_dir = os.path.expanduser(IvySubsystem.global_instance().get_options().cache_dir)

  # Normalize a jar (or module ref) to an M2 coordinate so library jars and ivy report
  # module refs can be compared in one set.
  def to_m2(jar):
    return M2Coordinate(org=jar.org, name=jar.name, rev=jar.rev,
                        classifier=jar.classifier, ext=jar.ext)

  # Collect the coordinates of every jar declared by JarLibrary targets reachable from
  # this target.
  libraries = self.context.build_graph.transitive_subgraph_of_addresses([unpacked_archives.address])
  libraries = [t for t in libraries if isinstance(t, JarLibrary)]
  coords = set()
  for library in libraries:
    coords.update(to_m2(jar) for jar in library.payload.jars)

  for resolve_hash in resolve_hashes:
    path = IvyUtils.xml_report_path(ivy_cache_dir, resolve_hash, 'default')
    info = IvyUtils.parse_xml_report('default', path)
    # Seed with refs matching our libraries' coordinates...
    refs_for_libraries = set()
    for ref in info.modules_by_ref.keys():
      if to_m2(ref) in coords:
        refs_for_libraries.add(ref)
    # ...then expand to their transitive dependencies in the ivy report.
    memo = {}
    for ref in tuple(refs_for_libraries):
      info.traverse_dependency_graph(ref, refs_for_libraries.add, memo)
    # Sorted for deterministic extraction order.
    for ref in sorted(refs_for_libraries):
      module = info.modules_by_ref[ref]
      artifact_path = module.artifact
      self.context.log.debug('Extracting {} to {}.'.format(to_m2(ref), unpack_dir))
      if artifact_path.endswith('.zip') or artifact_path.endswith('.jar'):
        ZIP.extract(artifact_path, unpack_dir, filter_func=unpack_filter)
      else:
        self._extract_tar(artifact_path, unpack_dir, filter_func=unpack_filter)
def test_shader_project(self):
  """Test that the binary target at the ``shading_project`` can be built and run.

  Explicitly checks that the classes end up with the correct shaded fully qualified classnames.
  """
  shading_project = 'testprojects/src/java/org/pantsbuild/testproject/shading'
  self.assert_success(self.run_pants(['clean-all']))
  self.assert_success(self.run_pants(['binary', shading_project]))

  expected_classes = {
    # Explicitly excluded by a shading_exclude() rule.
    'org/pantsbuild/testproject/shadingdep/PleaseDoNotShadeMe.class',
    # Not matched by any rule, so stays the same.
    'org/pantsbuild/testproject/shading/Main.class',
    # Shaded with the target_id prefix, along with the default pants prefix.
    ('__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/otherpackage/'
     'ShadeWithTargetId.class'),
    # Also shaded with the target_id prefix and default pants prefix, but for a different target
    # (so the target_id is different).
    '__shaded_by_pants__/org/pantsbuild/testproject/shading/ShadeSelf.class',
    # All these are shaded by the same shading_relocate_package(), which is recursive by default.
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/subpackage/Subpackaged.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/SomeClass.class',
    '__shaded_by_pants__/org/pantsbuild/testproject/shadingdep/Dependency.class',
    # Shaded by a shading_relocate() that completely renames the package and class name.
    'org/pantsbuild/testproject/foo/bar/MyNameIsDifferentNow.class',
  }

  jar_path = os.path.join('dist', 'shading.jar')
  init_subsystem(DistributionLocator)
  execute_java = DistributionLocator.cached(minimum_version='1.6').execute_java
  # Both the unshaded and the fully-renamed entry points must run from the built jar.
  for main_class in ('org.pantsbuild.testproject.shading.Main',
                     'org.pantsbuild.testproject.foo.bar.MyNameIsDifferentNow'):
    self.assertEqual(0, execute_java(classpath=[jar_path], main=main_class))

  received_classes = set()
  with temporary_dir() as tempdir:
    ZIP.extract(jar_path, tempdir, filter_func=lambda f: f.endswith('.class'))
    for root, _, files in os.walk(tempdir):
      received_classes.update(
        os.path.relpath(os.path.join(root, name), tempdir) for name in files)

  self.assertEqual(expected_classes, received_classes)
def check_zip_file_content(zip_file, expected_files):
  """Check zip file contains expected files as well as verify their contents are as expected.

  :param zip_file: Path to the zip file.
  :param expected_files: A map from file path included in the zip to its content. Set content
    to `None` to skip checking that file's content.
  :return: True if the zip holds exactly the expected files and every file with a non-None
    expected content matches it; otherwise False.
  """
  with temporary_dir() as workdir:
    ZIP.extract(zip_file, workdir)
    # The zip must contain exactly the expected paths, no more and no fewer.
    if not contains_exact_files(workdir, expected_files.keys()):
      return False

    # Iterate items() so each file needs only one dict lookup instead of two.
    for rel_path, expected_content in expected_files.items():
      if expected_content and not check_file_content(os.path.join(workdir, rel_path),
                                                     expected_content):
        return False

  return True
def _unpack(self, unpacked_jars):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedJars unpacked_jars: target referencing jar_libraries to unpack.
  """
  dest_dir = self._unpack_dir(unpacked_jars)
  # Recreate the destination from scratch so no stale files survive a re-run.
  if os.path.exists(dest_dir):
    shutil.rmtree(dest_dir)
  if not os.path.exists(dest_dir):
    os.makedirs(dest_dir)

  entry_filter = self.get_unpack_filter(unpacked_jars)
  jar_import_products = self.context.products.get_data(JarImportProducts)
  for coordinate, jar_path in jar_import_products.imports(unpacked_jars):
    self.context.log.debug('Unpacking jar {coordinate} from {jar_path} to {unpack_dir}.'
                           .format(coordinate=coordinate,
                                   jar_path=jar_path,
                                   unpack_dir=dest_dir))
    ZIP.extract(jar_path, dest_dir, filter_func=entry_filter)
def _unpack(self, unpacked_jars):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedJars unpacked_jars: target referencing jar_libraries to unpack.
  """
  dest_dir = self._unpack_dir(unpacked_jars)
  # Recreate the destination from scratch so no stale files survive a re-run.
  if os.path.exists(dest_dir):
    shutil.rmtree(dest_dir)
  if not os.path.exists(dest_dir):
    os.makedirs(dest_dir)

  entry_filter = self.get_unpack_filter(unpacked_jars)
  jarmap = self.context.products.get('ivy_imports')[unpacked_jars]
  for base_dir, jar_names in jarmap.items():
    for jar_name in jar_names:
      ZIP.extract(os.path.join(base_dir, jar_name), dest_dir, filter_func=entry_filter)
def _unpack(self, unpacked_jars):
  """Extracts files from the downloaded jar files and places them in a work directory.

  :param UnpackedJars unpacked_jars: target referencing jar_libraries to unpack.
  """
  work_dir = self._unpack_dir(unpacked_jars)
  # Blow away any previous contents before repopulating the work directory.
  if os.path.exists(work_dir):
    shutil.rmtree(work_dir)
  if not os.path.exists(work_dir):
    os.makedirs(work_dir)

  keep_entry = self.get_unpack_filter(unpacked_jars)
  products = self.context.products.get_data(JarImportProducts)
  for coordinate, jar_path in products.imports(unpacked_jars):
    self.context.log.debug(
      'Unpacking jar {coordinate} from {jar_path} to {unpack_dir}.'.format(
        coordinate=coordinate, jar_path=jar_path, unpack_dir=work_dir))
    ZIP.extract(jar_path, work_dir, filter_func=keep_entry)
def check_zip_file_content(zip_file, expected_files):
  """Check zip file contains expected files as well as verify their contents are as expected.

  :API: public

  :param zip_file: Path to the zip file.
  :param expected_files: A map from file path included in the zip to its content. Set content
    to `None` to skip checking.
  :return:
  """
  with temporary_dir() as workdir:
    ZIP.extract(zip_file, workdir)
    # Membership check first: the archive must hold exactly the expected paths.
    if not contains_exact_files(workdir, expected_files.keys()):
      return False

    for rel_path, expected_content in expected_files.items():
      full_path = os.path.join(workdir, rel_path)
      if expected_content and not check_file_content(full_path, expected_content):
        return False

  return True
def test_bundle_of_nonascii_classes(self):
  """Bundle a project containing non-ASCII class names, then run the resulting jar."""
  # TODO(John Sirois): We need a zip here to suck in external library classpath elements
  # pointed to by symlinks in the run_pants ephemeral tmpdir. Switch run_pants to be a
  # contextmanager that yields its results while the tmpdir chroot is still active and change
  # this test back to using an un-archived bundle.
  pants_run = self.run_pants(['goal', 'bundle', '--bundle-archive=zip',
                              'src/java/com/pants/testproject/unicode/main'])
  self.assertEquals(pants_run.returncode, self.PANTS_SUCCESS_CODE,
                    "goal bundle expected success, got {0}\n"
                    "got stderr:\n{1}\n"
                    "got stdout:\n{2}\n".format(pants_run.returncode,
                                                pants_run.stderr_data,
                                                pants_run.stdout_data))

  with temporary_dir() as chroot:
    ZIP.extract('dist/unicode.zip', chroot)
    java_proc = subprocess.Popen(['java', '-jar', 'unicode.jar'],
                                 stdout=subprocess.PIPE,
                                 cwd=chroot)
    stdout, _ = java_proc.communicate()
    self.assertEquals(java_proc.returncode, 0)

  self.assertIn("Have a nice day!", stdout)
  self.assertIn("shapeless success", stdout)