def print_deps(printed, dep, indent=0):
  """Recursively prints dep and its transitive dependencies as an indented tree.

  printed: set of dep ids already emitted; used both to suppress duplicates and to mark
           repeat sightings with a '*' prefix.
  dep: the target or jar dependency to print.
  indent: current tree depth passed through to print_dep.
  """
  dep_id, _ = self._dep_id(dep)
  if dep_id in printed:
    # Repeat sighting: suppress entirely in minimal mode, otherwise flag with '*'.
    if not self.is_minimal:
      print_dep("*%s" % dep_id, indent)
  else:
    if not self.is_external_only:
      print_dep(dep_id, indent)
      printed.add(dep_id)
      indent += 1
    # Internal (in-repo) dependencies only exist on jvm targets.
    if is_jvm(dep):
      for internal_dep in dep.internal_dependencies:
        print_deps(printed, internal_dep, indent)
    if not self.is_internal_only:
      if is_jvm(dep):
        for jar_dep in dep.jar_dependencies:
          jar_dep_id, internal = self._dep_id(jar_dep)
          if not internal:
            # External jars are re-printed on later sightings unless minimal or
            # external-only mode is in effect.
            if jar_dep_id not in printed or (not self.is_minimal and not self.is_external_only):
              print_dep(jar_dep_id, indent)
              printed.add(jar_dep_id)
def classpath(self, cp=None, confs=None):
  """Returns a list of classpath paths drawn from the context's 'classpath' state.

  :param cp: optional seed list of paths; extended in place when given.
  :param confs: optional collection of conf names; entries from other confs are skipped.
  """
  entries = cp or []
  with self.context.state('classpath', []) as conf_paths:
    entries.extend(path for conf, path in conf_paths if not confs or conf in confs)

  def add_resource_paths(predicate):
    # Append the sibling 'resources' dir for each distinct source base that matches.
    seen = set()
    for tgt in self.context.targets():
      if predicate(tgt) and tgt.target_base not in seen:
        resources = os.path.join(os.path.dirname(tgt.target_base), 'resources')
        entries.append(os.path.join(get_buildroot(), resources))
        seen.add(tgt.target_base)

  if self.context.config.getbool('jvm', 'parallel_src_paths', default=False):
    add_resource_paths(lambda t: is_jvm(t) and not is_test(t))
  if self.context.config.getbool('jvm', 'parallel_test_paths', default=False):
    add_resource_paths(lambda t: is_jvm(t) and is_test(t))
  return entries
def print_deps(printed, dep):
  """Emits graphviz digraph edges for dep's transitive dependency graph.

  printed: shared set holding both already-declared nodes and already-emitted
           (left_id, jar_id) edge tuples, so each prints at most once.
  """
  if dep not in printed:
    printed.add(dep)
    for dependency in dep.resolve():
      if is_jvm(dependency):
        for internal_dependency in dependency.internal_dependencies:
          print_deps(printed, internal_dependency)
      # jvm targets contribute their jar deps as nodes; anything else is its own node.
      for jar in (dependency.jar_dependencies if is_jvm(dependency) else [dependency]):
        jar_id, internal = self._dep_id(jar)
        if output_candidate(internal):
          if jar not in printed:
            print_dep(jar)
            printed.add(jar)
          target_id, _ = self._dep_id(target)
          dep_id, _ = self._dep_id(dependency)
          # In external-only mode every edge originates from the root target.
          left_id = target_id if self.is_external_only else dep_id
          if (left_id, jar_id) not in printed:
            # Internal edges are rendered dashed unless internal-only mode shows just them.
            styled = internal and not self.is_internal_only
            print(' "%s" -> "%s"%s;' % (left_id, jar_id, ' [style="dashed"]' if styled else ''))
            printed.add((left_id, jar_id))
def exec_doc(dependency):
  """Generates documentation for every target `dependency` resolves to.

  jvm targets get javadoc output; doc targets not already among `targets` get
  pants_doc output.  Both are written under target_path/doc_target.id/dep_target.id.
  """
  for dep_target in dependency.resolve():
    # TODO: document how doc projects get nested in pants_doc output
    # Both branches write to the same per-dep-target directory; compute it once.
    out_dir = os.path.join(target_path, doc_target.id, dep_target.id)
    if is_jvm(dep_target):
      self.execute_javadoc([dep_target], out_dir)
    # Idiom fix: was `not dep_target in targets`.
    if is_doc(dep_target) and dep_target not in targets:
      self.execute_pantsdoc([dep_target], out_dir)
def collect_jars(target):
  """Registers target's jar contribution: the target itself when it is a jar,
  otherwise each of its rev-pinned jar dependencies; jvm target excludes are
  lifted into the shared excludes set."""
  if is_jar(target):
    add_jar(target)
  elif target.jar_dependencies:
    for jar_dep in target.jar_dependencies:
      # Only rev-pinned jars are collected.
      if jar_dep.rev:
        add_jar(jar_dep)

  # Lift jvm target-level excludes up to the global excludes set.
  if is_jvm(target) and target.excludes:
    excludes.update(target.excludes)
def classpath(self, cp=None, confs=None, exclusives_classpath=None):
  """Returns a list of classpath paths.

  :param cp: optional seed list of paths; extended in place when supplied.
  :param confs: optional collection of conf names used to filter exclusives_classpath.
  :param exclusives_classpath: iterable of (conf, path) pairs; may be None.
  """
  classpath = cp or []
  # Bug fix: the default exclusives_classpath=None was iterated directly, raising
  # TypeError whenever the argument was omitted; treat None as an empty sequence.
  classpath.extend(path for conf, path in (exclusives_classpath or [])
                   if not confs or conf in confs)

  def add_resource_paths(predicate):
    # Append the sibling "resources" dir of each matching target's source base, once per base.
    bases = set()
    for target in self.context.targets():
      if predicate(target) and target.target_base not in bases:
        sibling_resources_base = os.path.join(os.path.dirname(target.target_base), "resources")
        classpath.append(os.path.join(get_buildroot(), sibling_resources_base))
        bases.add(target.target_base)

  if self.context.config.getbool("jvm", "parallel_src_paths", default=False):
    add_resource_paths(lambda t: is_jvm(t) and not is_test(t))
  if self.context.config.getbool("jvm", "parallel_test_paths", default=False):
    add_resource_paths(lambda t: is_jvm(t) and is_test(t))
  return classpath
def classpath(self, cp=None, confs=None):
  """Returns classpath entries from the context's 'classpath' state, optionally
  filtered to the given confs and seeded with cp (extended in place)."""
  result = cp or []
  with self.context.state('classpath', []) as conf_path_pairs:
    for conf, path in conf_path_pairs:
      if not confs or conf in confs:
        result.append(path)

  def add_resource_paths(predicate):
    bases_seen = set()
    for target in self.context.targets():
      if not predicate(target):
        continue
      if target.target_base in bases_seen:
        continue
      sibling = os.path.join(os.path.dirname(target.target_base), 'resources')
      result.append(os.path.join(get_buildroot(), sibling))
      bases_seen.add(target.target_base)

  # Each opt-in config flag enables resource paths for its slice of jvm targets.
  for option, selector in (('parallel_src_paths', lambda t: is_jvm(t) and not is_test(t)),
                           ('parallel_test_paths', lambda t: is_jvm(t) and is_test(t))):
    if self.context.config.getbool('jvm', option, default=False):
      add_resource_paths(selector)
  return result
def execute(self):
  """Prints dependencies for the target at self.address: a tree or digraph for
  all-jvm targets, a plain listing for python targets (which reject the
  jvm-specific option flags)."""
  target = Target.get(self.address)
  if all(is_jvm(t) for t in target.resolve()):
    renderer = self._print_digraph if self.is_graph else self._print_dependency_tree
    renderer(target)
  elif is_python(target):
    jvm_only_flags = (self.is_internal_only, self.is_external_only,
                      self.is_minimal, self.is_graph)
    if any(jvm_only_flags):
      print('Unsupported option for Python target', file=sys.stderr)
      sys.exit(1)
    self._print_python_dependencies(target, 0)
def print_deps(printed, dep, indent=0):
  """Recursively prints dep and its transitive dependencies as an indented tree.

  Dep ids already in `printed` are shown once more with a '*' prefix (unless
  minimal mode suppresses repeats) and not descended into again.
  """
  dep_id, _ = self._dep_id(dep)
  if dep_id in printed:
    # Repeat sighting: minimal mode stays silent, otherwise mark with '*'.
    if not self.is_minimal:
      print_dep("*%s" % dep_id, indent)
    return

  if not self.is_external_only:
    print_dep(dep_id, indent)
    printed.add(dep_id)
    indent += 1

  if is_jvm(dep):
    for child in dep.internal_dependencies:
      print_deps(printed, child, indent)

  if self.is_internal_only or not is_jvm(dep):
    return

  for jar in dep.jar_dependencies:
    jar_id, internal = self._dep_id(jar)
    if internal:
      continue
    # External jars are re-printed on later sightings unless minimal or
    # external-only mode is in effect.
    if jar_id not in printed or (not self.is_minimal and not self.is_external_only):
      print_dep(jar_id, indent)
      printed.add(jar_id)
def get_jar_paths(self, ivyinfo, target, conf):
  """Returns an OrderedSet of jar file paths contributed by target for the given conf.

  Dispatch by target kind: jar libraries delegate to their dependencies, jar
  dependencies resolve through ivy, and jvm targets report their built 'jars'
  products (plus dependency jars when self.transitive is set).
  """
  jar_paths = OrderedSet()
  if is_jar_library(target):
    # A jar library proxies jar dependencies or jvm targets, so its jars are
    # exactly its dependencies' jars.
    for dep in target.dependencies:
      jar_paths.update(self.get_jar_paths(ivyinfo, dep, conf))
  elif is_jar_dependency(target):
    module_ref = IvyModuleRef(target.org, target.name, target.rev, conf)
    jar_paths.update(self.get_jar_paths_for_ivy_module(ivyinfo, module_ref))
  elif is_jvm(target):
    jar_products = self.context.products.get('jars').get(target)
    for basedir, jars in jar_products.items():
      jar_paths.update(os.path.join(basedir, jar) for jar in jars)
    if self.transitive:
      for dep in target.dependencies:
        jar_paths.update(self.get_jar_paths(ivyinfo, dep, conf))
  return jar_paths
def get_jar_paths(self, ivyinfo, target, conf):
  """Returns an OrderedSet of jar file paths contributed by target for the given conf.

  Dispatches on target kind: jar libraries delegate to their dependencies, jar
  dependencies resolve through ivy, and jvm targets report their built 'jars'
  products (plus, when self.transitive is set, their dependencies' jars).
  """
  jar_paths = OrderedSet()
  if is_jar_library(target):
    # Jar library proxies jar dependencies or jvm targets, so the jars are just
    # those of the dependencies.
    for paths in [self.get_jar_paths(ivyinfo, dep, conf) for dep in target.dependencies]:
      jar_paths.update(paths)
  elif is_jar_dependency(target):
    ref = IvyModuleRef(target.org, target.name, target.rev, conf)
    jar_paths.update(self.get_jar_paths_for_ivy_module(ivyinfo, ref))
  elif is_jvm(target):
    for basedir, jars in self.context.products.get('jars').get(target).items():
      jar_paths.update([os.path.join(basedir, jar) for jar in jars])
    if self.transitive:
      for dep in target.dependencies:
        jar_paths.update(self.get_jar_paths(ivyinfo, dep, conf))
  return jar_paths
def _is_jvm(target):
  """True when target is a jvm target or a jvm app."""
  result = is_jvm(target)
  if not result:
    result = is_jvm_app(target)
  return result
def is_jardependant(target):
  """True when target can carry jar dependencies: it is itself a jar, or a jvm target."""
  jarish = is_jar(target)
  return jarish or is_jvm(target)
def create_ant_builds(self, workspace_root, targets, flags, target):
  """Generates the ivy.xml and build.xml for target under workspace_root, recursing
  over the target's internal dependencies.

  targets: memo dict mapping target.id to its generated build.xml path.
  flags: mutable set of ant flags; gains '-Dpants.build.file=pants-build.xml' when the
         target supplies a custom ant xml.
  Returns a (buildxml, ivyxml) tuple of generated file paths.
  """
  if target.id in targets:
    # Memo hit.
    # NOTE(review): this early return yields just the buildxml path while the normal
    # path returns a (buildxml, ivyxml) tuple -- confirm callers tolerate both shapes.
    return targets[target.id]

  # Link in libraries required by ant targets as needed
  def add_scaladeps(tgt):
    scaladeps = target.do_in_context(lambda: JarDependency(
      org='org.scala-lang',
      name='scala-library',
      rev='${scala.version}'
    ).with_sources().resolve())
    target.update_dependencies(scaladeps)

  if is_jvm(target):
    if not target.sources:
      # Give sourceless targets a stub entry so downstream template generation has
      # something to point at.
      target.sources = ['_not_a_real_file_']

    if isinstance(target, JavaProtobufLibrary):
      protobufdeps = target.do_in_context(lambda: JarDependency(
        org='com.google.protobuf',
        name='protobuf-java',
        rev='${protobuf.library.version}'
      ).resolve())
      target.update_dependencies(protobufdeps)
    elif isinstance(target, JavaThriftLibrary):
      def resolve_thriftdeps():
        all_deps = [
          Pants('3rdparty:commons-lang'),
          JarDependency(org='org.apache.thrift', name='libthrift', rev='${thrift.library.version}'),
          Pants('3rdparty:slf4j-api'),
          # finagle thrift extra deps
          Pants('3rdparty:finagle-core'),
          Pants('3rdparty:finagle-thrift'),
          Pants('3rdparty:util'),
        ]
        for dep in all_deps:
          target.update_dependencies(dep.resolve())
      target.do_in_context(resolve_thriftdeps)
    elif isinstance(target, JavaTests):
      junit = target.do_in_context(lambda: Pants('3rdparty:junit').resolve())
      target.update_dependencies(junit)
    elif isinstance(target, ScalaLibrary):
      add_scaladeps(target)
    elif isinstance(target, ScalaTests):
      add_scaladeps(target)
      specdeps = target.do_in_context(lambda: JarDependency(
        org='org.scala-tools.testing',
        name='${specs.name}',
        rev='${specs.version}'
      ).with_sources().resolve())
      target.update_dependencies(specdeps)

  try:
    library_template_data = target._create_template_data()
  except:
    # NOTE(review): bare except, and library_template_data is used right below --
    # presumably self.ferror aborts the process (confirm); otherwise this path
    # raises UnboundLocalError on the next line.
    self.ferror("Problem creating template data for %s(%s): %s" % (
      type(target).__name__, target.address, traceback.format_exc()))

  workspace = os.path.join(workspace_root, library_template_data.id)
  if not os.path.exists(workspace):
    os.makedirs(workspace)

  ivyxml = os.path.join(workspace, 'ivy.xml')
  AntBuilder._generate(self.root_dir, 'ivy', library_template_data, ivyxml)

  buildxml = os.path.join(workspace, 'build.xml')
  if target.custom_antxml_path:
    # A custom build.xml takes over; the generated build is written aside as
    # pants-build.xml and ant is pointed at it via the flag below.
    shutil.copyfile(target.custom_antxml_path, buildxml)
    pants_buildxml = os.path.join(workspace, 'pants-build.xml')
    flags.add('-Dpants.build.file=pants-build.xml')
  else:
    pants_buildxml = buildxml

  build_template = os.path.join(library_template_data.template_base, 'build')
  AntBuilder._generate(self.root_dir, build_template, library_template_data, pants_buildxml)

  targets[target.id] = buildxml

  for additional_library in target.internal_dependencies:
    self.create_ant_builds(workspace_root, targets, flags, additional_library)

  return buildxml, ivyxml
def _is_resolvable(target):
  # Resolvability is delegated wholesale to is_jvm: only jvm targets qualify.
  return is_jvm(target)
def create_ant_builds(self, workspace_root, targets, flags, target):
  """Generates the ivy.xml and build.xml for target under workspace_root, recursing
  over the target's internal dependencies.

  targets: memo dict mapping target.id to its generated build.xml path.
  flags: mutable set of ant flags; gains '-Dpants.build.file=pants-build.xml' when the
         target supplies a custom ant xml.
  Returns a (buildxml, ivyxml) tuple of generated file paths.
  """
  if target.id in targets:
    # Memo hit.
    # NOTE(review): this early return yields just the buildxml path while the normal
    # path returns a (buildxml, ivyxml) tuple -- confirm callers tolerate both shapes.
    return targets[target.id]

  # Link in libraries required by ant targets as needed
  def add_scaladeps(tgt):
    scaladeps = target.do_in_context(lambda: JarDependency(
      org='org.scala-lang',
      name='scala-library',
      rev='${scala.version}').with_sources().resolve())
    target.update_dependencies(scaladeps)

  if is_jvm(target):
    if not target.sources:
      # Give sourceless targets a stub entry so downstream template generation has
      # something to point at.
      target.sources = ['_not_a_real_file_']

    if isinstance(target, JavaProtobufLibrary):
      protobufdeps = target.do_in_context(lambda: JarDependency(
        org='com.google.protobuf',
        name='protobuf-java',
        rev='${protobuf.library.version}').resolve())
      target.update_dependencies(protobufdeps)
    elif isinstance(target, JavaThriftLibrary):
      def resolve_thriftdeps():
        all_deps = [
          Pants('3rdparty:commons-lang'),
          JarDependency(org='org.apache.thrift', name='libthrift',
                        rev='${thrift.library.version}'),
          Pants('3rdparty:slf4j-api'),
          # finagle thrift extra deps
          Pants('3rdparty:finagle-core'),
          Pants('3rdparty:finagle-thrift'),
          Pants('3rdparty:util'),
        ]
        for dep in all_deps:
          target.update_dependencies(dep.resolve())
      target.do_in_context(resolve_thriftdeps)
    elif isinstance(target, JavaTests):
      junit = target.do_in_context(lambda: Pants('3rdparty:junit').resolve())
      target.update_dependencies(junit)
    elif isinstance(target, ScalaLibrary):
      add_scaladeps(target)
    elif isinstance(target, ScalaTests):
      add_scaladeps(target)
      specdeps = target.do_in_context(lambda: JarDependency(
        org='org.scala-tools.testing',
        name='${specs.name}',
        rev='${specs.version}').with_sources().resolve())
      target.update_dependencies(specdeps)

  try:
    library_template_data = target._create_template_data()
  except:
    # NOTE(review): bare except, and library_template_data is used right below --
    # presumably self.ferror aborts the process (confirm); otherwise this path
    # raises UnboundLocalError on the next line.
    self.ferror("Problem creating template data for %s(%s): %s" % (
      type(target).__name__, target.address, traceback.format_exc()))

  workspace = os.path.join(workspace_root, library_template_data.id)
  if not os.path.exists(workspace):
    os.makedirs(workspace)

  ivyxml = os.path.join(workspace, 'ivy.xml')
  AntBuilder._generate(self.root_dir, 'ivy', library_template_data, ivyxml)

  buildxml = os.path.join(workspace, 'build.xml')
  if target.custom_antxml_path:
    # A custom build.xml takes over; the generated build is written aside as
    # pants-build.xml and ant is pointed at it via the flag below.
    shutil.copyfile(target.custom_antxml_path, buildxml)
    pants_buildxml = os.path.join(workspace, 'pants-build.xml')
    flags.add('-Dpants.build.file=pants-build.xml')
  else:
    pants_buildxml = buildxml

  build_template = os.path.join(library_template_data.template_base, 'build')
  AntBuilder._generate(self.root_dir, build_template, library_template_data, pants_buildxml)

  targets[target.id] = buildxml

  for additional_library in target.internal_dependencies:
    self.create_ant_builds(workspace_root, targets, flags, additional_library)

  return buildxml, ivyxml
def extract_target(java_targets, name=None):
  """Extracts a minimal set of linked targets from the given target's internal transitive
  dependency set.  The root target in the extracted target set is returned.

  The algorithm does a topological sort of the internal targets and then tries to coalesce
  targets of a given type.  Any target with a custom ant build xml will be excluded from the
  coalescing."""

  # TODO(John Sirois): this is broken - representative_target is not necessarily representative
  representative_target = list(java_targets)[0]

  meta_target_base_name = "fast-%s" % (name if name else representative_target.name)
  provides = None
  deployjar = hasattr(representative_target, 'deployjar') and representative_target.deployjar
  buildflags = representative_target.buildflags

  def discriminator(tgt):
    # Chunk up our targets by (type, src base) - the javac task in the ant build relies upon a
    # single srcdir that points to the root of a package tree to ensure differential compilation
    # works.
    return type(tgt), tgt.target_base

  def create_target(category, target_name, target_index, targets):
    # Builds one aggregated meta target for a (type, base) chunk.
    def name(name):
      return "%s-%s-%d" % (target_name, name, target_index)

    # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
    # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
    # so that tests can house test IDL in tests/
    target_type, base = category
    if target_type == JavaProtobufLibrary:
      return JavaProtobufLibrary._aggregate(name('protobuf'), provides, buildflags, targets)
    elif target_type == JavaThriftLibrary:
      return JavaThriftLibrary._aggregate(name('thrift'), provides, buildflags, targets)
    elif target_type == AnnotationProcessor:
      return AnnotationProcessor._aggregate(name('apt'), provides, targets)
    elif target_type == JavaLibrary:
      return JavaLibrary._aggregate(name('java'), provides, deployjar, buildflags, targets, base)
    elif target_type == ScalaLibrary:
      return ScalaLibrary._aggregate(name('scala'), provides, deployjar, buildflags, targets, base)
    elif target_type == JavaTests:
      return JavaTests._aggregate(name('java-tests'), buildflags, targets)
    elif target_type == ScalaTests:
      return ScalaTests._aggregate(name('scala-tests'), buildflags, targets)
    else:
      raise Exception("Cannot aggregate targets of type: %s" % target_type)

  # TODO(John Sirois): support a flag that selects conflict resolution policy - this currently
  # happens to mirror the ivy policy we use
  def resolve_conflicts(target):
    # Keeps only the highest-rev jar per (org, name), mirroring ivy's latest-rev policy.
    dependencies = {}
    for dependency in target.resolved_dependencies:
      for jar in dependency._as_jar_dependencies():
        key = jar.org, jar.name
        previous = dependencies.get(key, jar)
        if jar.rev >= previous.rev:
          if jar != previous:
            # NOTE(review): python 2 print statement (other code in this file uses the
            # print() function); also mutates target.resolved_dependencies while the
            # outer loop iterates it -- confirm both are intended.
            print "WARNING: replacing %s with %s for %s" % (previous, jar, target._id)
            target.resolved_dependencies.remove(previous)
            target.jar_dependencies.remove(previous)
          dependencies[key] = jar
    return target

  # chunk up our targets by type & custom build xml
  coalesced = InternalTarget.coalesce_targets(java_targets, discriminator)
  coalesced = list(reversed(coalesced))

  start_type = discriminator(coalesced[0])
  start = 0
  descriptors = []

  for current in range(0, len(coalesced)):
    current_target = coalesced[current]
    current_type = discriminator(current_target)

    if current_target.custom_antxml_path:
      if start < current:
        # if we have a type chunk to our left, record it
        descriptors.append((start_type, coalesced[start:current]))

      # record a chunk containing just the target that has the custom build xml to be conservative
      descriptors.append((current_type, [current_target]))
      start = current + 1
      if current < (len(coalesced) - 1):
        start_type = discriminator(coalesced[start])
    elif start_type != current_type:
      # record the type chunk we just left
      descriptors.append((start_type, coalesced[start:current]))
      start = current
      start_type = current_type

  if start < len(coalesced):
    # record the tail chunk
    descriptors.append((start_type, coalesced[start:]))

  # build meta targets aggregated from the chunks and keep track of which targets end up
  # in which meta targets
  meta_targets_by_target_id = dict()
  targets_by_meta_target = []
  for (ttype, targets), index in zip(descriptors, reversed(range(0, len(descriptors)))):
    meta_target = resolve_conflicts(create_target(ttype, meta_target_base_name, index, targets))
    targets_by_meta_target.append((meta_target, targets))
    for target in targets:
      meta_targets_by_target_id[target._id] = meta_target

  # calculate the other meta-targets (if any) each meta-target depends on
  extra_targets_by_meta_target = []
  for meta_target, targets in targets_by_meta_target:
    meta_deps = set()
    custom_antxml_path = None
    for target in targets:
      if target.custom_antxml_path:
        custom_antxml_path = target.custom_antxml_path
      for dep in target.resolved_dependencies:
        if is_jvm(dep):
          meta = meta_targets_by_target_id[dep._id]
          if meta != meta_target:
            meta_deps.add(meta)
    extra_targets_by_meta_target.append((meta_target, meta_deps, custom_antxml_path))

  def lift_excludes(meta_target):
    # Pulls excludes from the whole internal dependency closure up onto the meta target.
    excludes = set()
    def lift(target):
      if target.excludes:
        excludes.update(target.excludes)
      for jar_dep in target.jar_dependencies:
        excludes.update(jar_dep.excludes)
      for internal_dep in target.internal_dependencies:
        lift(internal_dep)
    lift(meta_target)
    return excludes

  # link in the extra inter-meta deps
  meta_targets = []
  for meta_target, extra_deps, custom_antxml_path in extra_targets_by_meta_target:
    meta_targets.append(meta_target)
    meta_target.update_dependencies(extra_deps)
    meta_target.excludes = lift_excludes(meta_target)
    meta_target.custom_antxml_path = custom_antxml_path

  sorted_meta_targets = InternalTarget.sort_targets(meta_targets)

  def prune_metas(target):
    if sorted_meta_targets:
      try:
        sorted_meta_targets.remove(target)
      except ValueError:
        # we've already removed target in the current walk
        pass

  # link any disconnected meta_target graphs so we can return 1 root target
  root = None
  while sorted_meta_targets:
    new_root = sorted_meta_targets[0]
    new_root.walk(prune_metas, is_jvm)
    if root:
      new_root.update_dependencies([root])
    root = new_root

  return root
def extract_target(java_targets, name=None):
  """Extracts a minimal set of linked targets from the given target's internal transitive
  dependency set.  The root target in the extracted target set is returned.

  The algorithm does a topological sort of the internal targets and then tries to coalesce
  targets of a given type.  Any target with a custom ant build xml will be excluded from the
  coalescing."""

  # TODO(John Sirois): this is broken - representative_target is not necessarily representative
  representative_target = list(java_targets)[0]

  meta_target_base_name = "fast-%s" % (name if name else representative_target.name)
  provides = None
  deployjar = hasattr(representative_target, 'deployjar') and representative_target.deployjar
  buildflags = representative_target.buildflags

  def discriminator(tgt):
    # Chunk up our targets by (type, src base) - the javac task in the ant build relies upon a
    # single srcdir that points to the root of a package tree to ensure differential compilation
    # works.
    return type(tgt), tgt.target_base

  def create_target(category, target_name, target_index, targets):
    # Builds one aggregated meta target for a (type, base) chunk.
    def name(name):
      return "%s-%s-%d" % (target_name, name, target_index)

    # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
    # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
    # so that tests can house test IDL in tests/
    target_type, base = category
    if target_type == JavaProtobufLibrary:
      return JavaProtobufLibrary._aggregate(name('protobuf'), provides, buildflags, targets)
    elif target_type == JavaThriftLibrary:
      return JavaThriftLibrary._aggregate(name('thrift'), provides, buildflags, targets)
    elif target_type == AnnotationProcessor:
      return AnnotationProcessor._aggregate(name('apt'), provides, targets)
    elif target_type == JavaLibrary:
      return JavaLibrary._aggregate(name('java'), provides, deployjar, buildflags, targets, base)
    elif target_type == ScalaLibrary:
      return ScalaLibrary._aggregate(name('scala'), provides, deployjar, buildflags, targets, base)
    elif target_type == JavaTests:
      return JavaTests._aggregate(name('java-tests'), buildflags, targets)
    elif target_type == ScalaTests:
      return ScalaTests._aggregate(name('scala-tests'), buildflags, targets)
    else:
      raise Exception("Cannot aggregate targets of type: %s" % target_type)

  # TODO(John Sirois): support a flag that selects conflict resolution policy - this currently
  # happens to mirror the ivy policy we use
  def resolve_conflicts(target):
    # Keeps only the highest-rev jar per (org, name), mirroring ivy's latest-rev policy.
    dependencies = {}
    for dependency in target.resolved_dependencies:
      for jar in dependency._as_jar_dependencies():
        key = jar.org, jar.name
        previous = dependencies.get(key, jar)
        if jar.rev >= previous.rev:
          if jar != previous:
            # NOTE(review): python 2 print statement (other code in this file uses the
            # print() function); also mutates target.resolved_dependencies while the
            # outer loop iterates it -- confirm both are intended.
            print "WARNING: replacing %s with %s for %s" % (previous, jar, target._id)
            target.resolved_dependencies.remove(previous)
            target.jar_dependencies.remove(previous)
          dependencies[key] = jar
    return target

  # chunk up our targets by type & custom build xml
  coalesced = InternalTarget.coalesce_targets(java_targets, discriminator)
  coalesced = list(reversed(coalesced))

  start_type = discriminator(coalesced[0])
  start = 0
  descriptors = []

  for current in range(0, len(coalesced)):
    current_target = coalesced[current]
    current_type = discriminator(current_target)

    if current_target.custom_antxml_path:
      if start < current:
        # if we have a type chunk to our left, record it
        descriptors.append((start_type, coalesced[start:current]))

      # record a chunk containing just the target that has the custom build xml to be conservative
      descriptors.append((current_type, [current_target]))
      start = current + 1
      if current < (len(coalesced) - 1):
        start_type = discriminator(coalesced[start])
    elif start_type != current_type:
      # record the type chunk we just left
      descriptors.append((start_type, coalesced[start:current]))
      start = current
      start_type = current_type

  if start < len(coalesced):
    # record the tail chunk
    descriptors.append((start_type, coalesced[start:]))

  # build meta targets aggregated from the chunks and keep track of which targets end up
  # in which meta targets
  meta_targets_by_target_id = dict()
  targets_by_meta_target = []
  for (ttype, targets), index in zip(descriptors, reversed(range(0, len(descriptors)))):
    meta_target = resolve_conflicts(create_target(ttype, meta_target_base_name, index, targets))
    targets_by_meta_target.append((meta_target, targets))
    for target in targets:
      meta_targets_by_target_id[target._id] = meta_target

  # calculate the other meta-targets (if any) each meta-target depends on
  extra_targets_by_meta_target = []
  for meta_target, targets in targets_by_meta_target:
    meta_deps = set()
    custom_antxml_path = None
    for target in targets:
      if target.custom_antxml_path:
        custom_antxml_path = target.custom_antxml_path
      for dep in target.resolved_dependencies:
        if is_jvm(dep):
          meta = meta_targets_by_target_id[dep._id]
          if meta != meta_target:
            meta_deps.add(meta)
    extra_targets_by_meta_target.append((meta_target, meta_deps, custom_antxml_path))

  def lift_excludes(meta_target):
    # Pulls excludes from the whole internal dependency closure up onto the meta target.
    excludes = set()
    def lift(target):
      if target.excludes:
        excludes.update(target.excludes)
      for jar_dep in target.jar_dependencies:
        excludes.update(jar_dep.excludes)
      for internal_dep in target.internal_dependencies:
        lift(internal_dep)
    lift(meta_target)
    return excludes

  # link in the extra inter-meta deps
  meta_targets = []
  for meta_target, extra_deps, custom_antxml_path in extra_targets_by_meta_target:
    meta_targets.append(meta_target)
    meta_target.update_dependencies(extra_deps)
    meta_target.excludes = lift_excludes(meta_target)
    meta_target.custom_antxml_path = custom_antxml_path

  sorted_meta_targets = InternalTarget.sort_targets(meta_targets)

  def prune_metas(target):
    if sorted_meta_targets:
      try:
        sorted_meta_targets.remove(target)
      except ValueError:
        # we've already removed target in the current walk
        pass

  # link any disconnected meta_target graphs so we can return 1 root target
  root = None
  while sorted_meta_targets:
    new_root = sorted_meta_targets[0]
    new_root.walk(prune_metas, is_jvm)
    if root:
      new_root.update_dependencies([root])
    root = new_root

  return root
def _is_jvm(dep):
  """True when dep is a jvm target or a jvm app."""
  jvmish = is_jvm(dep)
  return jvmish if jvmish else is_jvm_app(dep)