def classpath(self, targets, cp=None):
  """Return a classpath for the given targets, optionally prefixed by the entries in ``cp``.

  :param targets: the targets for which to compute classpath entries.
  :param cp: optional iterable of entries to prepend to the result.
  :return: a list of classpath strings.
  """
  # Start from a copy of the prefix (if any) so the caller's list is never mutated.
  entries = list(cp) if cp else []
  compile_classpath = self.context.products.get_data('compile_classpath')
  entries.extend(ClasspathUtil.classpath_entries(targets, compile_classpath, self.confs))
  return entries
def classpath(self, targets, cp=None):
  """Compute classpath entries for ``targets``, prepending any entries given in ``cp``.

  :param targets: the targets for which to compute classpath entries.
  :param cp: optional iterable of entries to prepend to the result.
  :return: a list of classpath strings.
  """
  prefix = [] if not cp else list(cp)
  target_entries = ClasspathUtil.classpath_entries(
      targets, self.context.products.get_data('compile_classpath'), self.confs)
  # Prefix entries come first, followed by the entries derived from the targets.
  return prefix + list(target_entries)
def _write_to_artifact_cache(self, vts, compile_context, get_update_artifact_cache_work):
  """Publish the compiled artifacts for a single target to the artifact cache.

  Collects the target's intransitive classpath entries, its compile log, and its jar, then
  submits background work that first relativizes the analysis file and then uploads all of
  the artifacts to the cache.

  :param vts: a versioned target set that must contain exactly one target.
  :param compile_context: the compile context for that same target.
  :param get_update_artifact_cache_work: callable producing the cache-upload work item for a
    list of (versioned-target, artifact-files) pairs.
  """
  assert len(vts.targets) == 1
  assert vts.targets[0] == compile_context.target

  # Noop if the target is uncacheable.
  if (compile_context.target.has_label('no_cache')):
    return
  vt = vts.versioned_targets[0]

  # Set up args to relativize analysis in the background.
  portable_analysis_file = self._portable_analysis_for_target(
      self._analysis_dir, compile_context.target)
  relativize_args_tuple = (compile_context.analysis_file, portable_analysis_file)

  # Collect the artifacts for this target.
  artifacts = []

  # Intransitive classpath entries.
  target_classpath = ClasspathUtil.classpath_entries(
      (compile_context.target, ),
      self.context.products.get_data('runtime_classpath'),
      ('default', ),
      transitive=False)
  for entry in target_classpath:
    if ClasspathUtil.is_jar(entry):
      artifacts.append(entry)
    elif ClasspathUtil.is_dir(entry):
      # Cache the individual files under a directory entry, not the directory itself.
      for rel_file in ClasspathUtil.classpath_entries_contents([entry]):
        artifacts.append(os.path.join(entry, rel_file))
    else:
      # non-jar and non-directory classpath entries should be ignored
      pass

  # Log file.
  log_file = self._capture_log_file(compile_context.target)
  if log_file and os.path.exists(log_file):
    artifacts.append(log_file)

  # Jar.
  artifacts.append(compile_context.jar_file)

  # Get the 'work' that will publish these artifacts to the cache.
  # NB: the portable analysis_file won't exist until we finish.
  vts_artifactfiles_pair = (vt, artifacts + [portable_analysis_file])
  update_artifact_cache_work = get_update_artifact_cache_work([vts_artifactfiles_pair])

  # And execute it.
  if update_artifact_cache_work:
    # Relativize must run before the upload so the cached analysis file is machine-portable.
    work_chain = [
        Work(self._analysis_tools.relativize, [relativize_args_tuple], 'relativize'),
        update_artifact_cache_work
    ]
    self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache')
def _write_to_artifact_cache(self, vts, compile_context, get_update_artifact_cache_work):
  """Publish the compiled artifacts for a single target to the artifact cache.

  Gathers the target's intransitive classpath entries, compile log, and jar, then submits
  background work that relativizes the analysis file and uploads the artifacts.

  :param vts: a versioned target set that must contain exactly one target.
  :param compile_context: the compile context for that same target.
  :param get_update_artifact_cache_work: callable producing the cache-upload work item for a
    list of (versioned-target, artifact-files) pairs.
  """
  assert len(vts.targets) == 1
  assert vts.targets[0] == compile_context.target

  # Noop if the target is uncacheable.
  if (compile_context.target.has_label('no_cache')):
    return
  vt = vts.versioned_targets[0]

  # Set up args to relativize analysis in the background.
  portable_analysis_file = self._portable_analysis_for_target(
      self._analysis_dir, compile_context.target)
  relativize_args_tuple = (compile_context.analysis_file, portable_analysis_file)

  # Collect the artifacts for this target.
  artifacts = []

  # Intransitive classpath entries.
  target_classpath = ClasspathUtil.classpath_entries(
      (compile_context.target,),
      self.context.products.get_data('runtime_classpath'),
      ('default',),
      transitive=False)
  for entry in target_classpath:
    if ClasspathUtil.is_jar(entry):
      artifacts.append(entry)
    elif ClasspathUtil.is_dir(entry):
      # Cache the individual files under a directory entry, not the directory itself.
      for rel_file in ClasspathUtil.classpath_entries_contents([entry]):
        artifacts.append(os.path.join(entry, rel_file))
    else:
      # non-jar and non-directory classpath entries should be ignored
      pass

  # Log file.
  log_file = self._capture_log_file(compile_context.target)
  if log_file and os.path.exists(log_file):
    artifacts.append(log_file)

  # Jar.
  artifacts.append(compile_context.jar_file)

  # Get the 'work' that will publish these artifacts to the cache.
  # NB: the portable analysis_file won't exist until we finish.
  vts_artifactfiles_pair = (vt, artifacts + [portable_analysis_file])
  update_artifact_cache_work = get_update_artifact_cache_work([vts_artifactfiles_pair])

  # And execute it.
  if update_artifact_cache_work:
    # Relativize must run before the upload so the cached analysis file is machine-portable.
    work_chain = [
        Work(self._analysis_tools.relativize, [relativize_args_tuple], 'relativize'),
        update_artifact_cache_work
    ]
    self.context.submit_background_work_chain(work_chain, parent_workunit_name='cache')
def add_to_jar(tgt):
  """Add the intransitive 'default' classpath entries of ``tgt`` (plus its resources) to the jar."""
  # Fetch classpath entries for this target and any associated resource targets.
  tgts = [tgt] + tgt.resources if tgt.has_resources else [tgt]
  target_classpath = ClasspathUtil.classpath_entries(
      tgts, classpath_products, ('default', ), transitive=False)
  # Record the target as added only when it actually contributed classpath entries.
  if target_classpath:
    targets_added.append(tgt)
  # NOTE(review): formatting reconstructed from a collapsed source line — the entry loop and
  # the agent-manifest check are assumed to run unconditionally (not only when
  # target_classpath is non-empty); confirm against the original file.
  for entry in target_classpath:
    add_classpath_entry(entry)
  if isinstance(tgt, JavaAgent):
    self._add_agent_manifest(tgt, self._manifest)
def add_to_jar(tgt):
  """Add the intransitive 'default' classpath entries of ``tgt`` (plus its resources) to the jar."""
  # Fetch classpath entries for this target and any associated resource targets.
  tgts = [tgt] + tgt.resources if tgt.has_resources else [tgt]
  target_classpath = ClasspathUtil.classpath_entries(
      tgts, classpath_products, ('default',), transitive=False)
  # Record the target as added only when it actually contributed classpath entries.
  if target_classpath:
    targets_added.append(tgt)
  # NOTE(review): formatting reconstructed from a collapsed source line — the entry loop and
  # the agent-manifest check are assumed to run unconditionally (not only when
  # target_classpath is non-empty); confirm against the original file.
  for entry in target_classpath:
    add_classpath_entry(entry)
  if isinstance(tgt, JavaAgent):
    self._add_agent_manifest(tgt, self._manifest)
def classpath(self, targets, classpath_prefix=None, classpath_product=None):
  """Builds a classpath for the current task and given targets, optionally including a classpath
  prefix or building from a non-default classpath product.

  :param targets: the targets for which to build the classpath.
  :param classpath_prefix: optional additional entries to prepend to the classpath.
  :param classpath_product: an optional ClasspathProduct from which to build the classpath. if not
  specified, the compile_classpath will be used.
  :return: a list of classpath strings.
  """
  # Fall back to the compile_classpath product when no explicit product is supplied.
  product = classpath_product or self.context.products.get_data('compile_classpath')
  # Copy the prefix (if any) so the caller's list is left untouched.
  result = [] if classpath_prefix is None or not classpath_prefix else list(classpath_prefix)
  for entry in ClasspathUtil.classpath_entries(targets, product, self.confs):
    result.append(entry)
  return result
def get_entries(tgt):
  """Accumulate classpath entries and unpacked-library class files for ``tgt``."""
  # NB: This walk seemed to rely on the assumption that only internal targets had
  # classes_by_target; that's preserved here by not looking at the classpath entries for
  # external targets.
  if not isinstance(tgt, JarLibrary):
    cp_entries = ClasspathUtil.classpath_entries((tgt,), classpath_products, transitive=False)
    gathered_entries.update(cp_entries)

  # Gather classes from the contents of unpacked libraries.
  unpacked = unpacked_archives.get(tgt)
  if unpacked:
    # If there are unpacked_archives then we know this target is an AndroidLibrary.
    for archives in unpacked.values():
      for unpacked_dir in archives:
        try:
          gathered_entries.update(self._filter_unpacked_dir(tgt, unpacked_dir, class_files))
        except TaskError as e:
          # NOTE(review): the message formats 'target' — a name not defined in this block
          # (the parameter is 'tgt'); presumably it refers to the root target in the
          # enclosing scope. Confirm this is intentional and not a typo for 'tgt'.
          raise self.DuplicateClassFileException(
            "Attempted to add duplicate class files from separate libraries into dex file! "
            "This likely indicates a version conflict in the target's dependencies.\n"
            "\nTarget:\n{}\n{}".format(target, e))
def classpath(self, targets, classpath_prefix=None, classpath_product=None):
  """Builds a classpath for the current task and given targets, optionally including a classpath
  prefix or building from a non-default classpath product.

  :param targets: the targets for which to build the classpath.
  :param classpath_prefix: optional additional entries to prepend to the classpath.
  :param classpath_product: an optional ClasspathProduct from which to build the classpath. if not
  specified, the runtime_classpath will be used.
  :return: a list of classpath strings.
  """
  # Copy the prefix (if any) so the caller's list is never mutated.
  prefix = list(classpath_prefix) if classpath_prefix else []
  # Fall back to the runtime_classpath product when no explicit product is supplied.
  if classpath_product is None or not classpath_product:
    classpath_product = self.context.products.get_data('runtime_classpath')
  target_entries = ClasspathUtil.classpath_entries(targets, classpath_product, self.confs)
  return prefix + list(target_entries)
def targets_by_file(self):
  """Returns a map from abs path of source, class or jar file to an OrderedSet of targets.

  The value is usually a singleton, because a source or class file belongs to a single
  target. However a single jar may be provided (transitively or intransitively) by multiple
  JarLibrary targets. But if there is a JarLibrary target that depends on a jar directly,
  then that "canonical" target will be the first one in the list of targets.
  """
  targets_by_file = defaultdict(OrderedSet)
  runtime_classpath = self.context.products.get_data('runtime_classpath')

  # Compute src -> target.
  self.context.log.debug('Mapping sources...')
  buildroot = get_buildroot()
  # Look at all targets in-play for this pants run. Does not include synthetic targets,
  for target in self.context.targets():
    if isinstance(target, JvmTarget):
      for src in target.sources_relative_to_buildroot():
        targets_by_file[os.path.join(buildroot, src)].add(target)
      # TODO(Tejal Desai): pantsbuild/pants/65: Remove java_sources attribute for ScalaLibrary
      if isinstance(target, ScalaLibrary):
        for java_source in target.java_sources:
          for src in java_source.sources_relative_to_buildroot():
            # Note: the java source file maps to the enclosing ScalaLibrary target, not to
            # the java_source target itself.
            targets_by_file[os.path.join(buildroot, src)].add(target)

  # Compute classfile -> target and jar -> target.
  self.context.log.debug('Mapping classpath...')
  for target in self.context.targets():
    # Classpath content.
    files = ClasspathUtil.classpath_contents((target,), runtime_classpath, transitive=False)
    # And jars; for binary deps, zinc doesn't emit precise deps (yet).
    cp_entries = ClasspathUtil.classpath_entries((target,), runtime_classpath, transitive=False)
    jars = [cpe for cpe in cp_entries if ClasspathUtil.is_jar(cpe)]
    for coll in [files, jars]:
      for f in coll:
        targets_by_file[f].add(target)

  return targets_by_file
def get_entries(tgt):
  """Accumulate classpath entries and unpacked-library class files for ``tgt``."""
  # NB: This walk seemed to rely on the assumption that only internal targets had
  # classes_by_target; that's preserved here by not looking at the classpath entries for
  # external targets.
  if not isinstance(tgt, JarLibrary):
    cp_entries = ClasspathUtil.classpath_entries(
        (tgt, ), classpath_products, transitive=False)
    gathered_entries.update(cp_entries)

  # Gather classes from the contents of unpacked libraries.
  unpacked = unpacked_archives.get(tgt)
  if unpacked:
    # If there are unpacked_archives then we know this target is an AndroidLibrary.
    for archives in unpacked.values():
      for unpacked_dir in archives:
        try:
          gathered_entries.update(
              self._filter_unpacked_dir(tgt, unpacked_dir, class_files))
        except TaskError as e:
          # NOTE(review): the message formats 'target' — a name not defined in this block
          # (the parameter is 'tgt'); presumably it refers to the root target in the
          # enclosing scope. Confirm this is intentional and not a typo for 'tgt'.
          raise self.DuplicateClassFileException(
            "Attempted to add duplicate class files from separate libraries into dex file! "
            "This likely indicates a version conflict in the target's dependencies.\n"
            "\nTarget:\n{}\n{}".format(target, e))