def test_extract_target(self):
    """Verifies that _extract_target grafts codegen targets into the returned internal deps
    while rolling non-codegen targets' external jars up into the returned jar deps."""
    jar1 = MockTarget('jar1', rev=1)
    jar2 = MockTarget('jar2', rev=1)
    jar3 = MockTarget('jar3', rev=1)
    jar4 = MockTarget('jar4', rev=1)

    f = MockTarget('f', is_codegen=True)
    b = MockTarget('b', is_codegen=True, internal_dependencies=[f])
    d = MockTarget('d', internal_dependencies=[f], jar_dependencies=[jar1])
    e = MockTarget('e', jar_dependencies=[jar2])

    # This codegen target has a jar dependency, but it should not be rolled up since the codegen
    # target itself is grafted into the dep tree
    c = MockTarget('c', is_codegen=True, internal_dependencies=[d, e], jar_dependencies=[jar3])

    a = MockTarget('a', internal_dependencies=[b, c, e], jar_dependencies=[jar4])

    # The predicate treats every target as transitive.
    internal_deps, jar_deps = _extract_target(a, lambda target: True)

    # Only the codegen targets directly reachable without crossing another codegen target
    # hang off the synthetic root.
    self.assertEquals(OrderedSet([b, c]), internal_deps)
    for dep in internal_deps:
        self.assertEquals(OrderedSet([f]), dep.internal_dependencies)
    # jar3 is absent: it belongs to codegen target c, which is grafted rather than rolled up.
    self.assertEquals(set([jar1, jar2, jar4]), set(jar_deps))
def get_all_deps():
    """Returns the scala-library dependency plus any caller-supplied dependencies."""
    deps = OrderedSet()
    deps.add(Pants('3rdparty:scala-library').resolve())
    if dependencies:
        deps.update(dependencies)
    return deps
def _create_doc_target(self):
    """Builds a synthetic JavaLibrary aggregating the doc-able sources and external jars of
    self.targets, so a single javadoc invocation can cover them all."""
    all_sources = []
    all_deps = OrderedSet()
    for target in self.targets:
        # Honor --only-provides: skip targets that are not exported.
        if not self.only_provides or is_exported(target):
            for source in target.sources:
                source_path = os.path.join(self.java_src_prefix, source)
                if os.path.exists(source_path):
                    all_sources.append(source_path)
                else:
                    print "skipping %s" % source_path
            for jar_dep in target.jar_dependencies:
                # Only external (versioned) jars matter; copy so the intransitive flag does
                # not leak back onto the original dependency object.
                if jar_dep.rev:
                    all_deps.add(copy(jar_dep).intransitive())

    def create_meta_target():
        return JavaLibrary('pants.doc.deps',
                           all_sources,
                           provides=None,
                           dependencies=all_deps,
                           excludes=None,
                           resources=None,
                           binary_resources=None,
                           deployjar=False,
                           buildflags=None,
                           is_meta=True)

    # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
    return list(self.targets)[0].do_in_context(create_meta_target)
def _create_doc_target(self):
    """Builds a synthetic JavaLibrary aggregating the doc-able sources and external jars of
    self.targets, so a single doc generation pass can cover them all."""
    all_sources = []
    all_deps = OrderedSet()
    for target in self.targets:
        # Honor --only-provides: skip targets that are not exported.
        if not self.only_provides or is_exported(target):
            for source in target.sources:
                source_path = os.path.join(self.java_src_prefix, source)
                if os.path.exists(source_path):
                    all_sources.append(source_path)
                else:
                    print "skipping %s" % source_path
            for jar_dep in target.jar_dependencies:
                # Only external (versioned) jars matter; copy so the intransitive flag does
                # not leak back onto the original dependency object.
                if jar_dep.rev:
                    all_deps.add(copy(jar_dep).intransitive())

    def create_meta_target():
        return JavaLibrary('pants.doc.deps',
                           all_sources,
                           provides = None,
                           dependencies = all_deps,
                           excludes = None,
                           resources = None,
                           binary_resources = None,
                           deployjar = False,
                           buildflags = None,
                           is_meta = True)

    # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
    return list(self.targets)[0].do_in_context(create_meta_target)
def _run_tests(self, targets, args):
    """Runs py.test over the test sources of the given targets (and, recursively, their
    dependencies), returning the subprocess exit status."""
    testargs = OrderedSet([ 'py.test' ])
    if args:
        testargs.update(args)

    def add_tests(template_data):
        # Accumulates test file paths from this target and recurses into its dependencies.
        if template_data.sources:
            basedir = template_data.template_base
            testargs.update(os.path.join(basedir, test) for test in template_data.sources)
        if template_data.dependencies:
            for dependency in template_data.dependencies:
                for dep in dependency.resolve():
                    add_tests(dep._create_template_data())

    for target in targets:
        template_data = target._create_template_data()
        add_tests(template_data)

    print 'PythonBuilder executing (PYTHONPATH="%s") %s' % (
        os.environ['PYTHONPATH'], ' '.join(testargs))
    return subprocess.call(testargs, cwd = self.root_dir)
def get_all_deps():
    """Returns the specs-runner and scala-library deps plus any caller-supplied dependencies."""
    deps = OrderedSet()
    deps.add(Pants('src/scala/com/twitter/common/testing:explicit-specs-runner').resolve())
    deps.add(Pants('3rdparty:scala-library').resolve())
    if dependencies:
        deps.update(dependencies)
    return deps
def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.

    If no base_path is specified, root_dir is assumed to be the base_path."""
    found = OrderedSet()
    buildfiles = BuildFile.scan_buildfiles(root_dir, base_path)
    for buildfile in buildfiles:
        found.update(Target.get_all_addresses(buildfile))
    return found
def scan_addresses(root_dir, base_path=None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.

    If no base_path is specified, root_dir is assumed to be the base_path."""
    addresses = OrderedSet()
    for buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
        for address in Target.get_all_addresses(buildfile):
            addresses.add(address)
    return addresses
def get_all_deps():
    """Returns commons-lang, libthrift and slf4j-api deps plus any caller-supplied dependencies."""
    all_deps = OrderedSet()
    all_deps.update(Pants('3rdparty:commons-lang').resolve())
    thrift = JarDependency(org='org.apache.thrift',
                           name='libthrift',
                           rev='${thrift.library.version}')
    all_deps.update(thrift.resolve())
    all_deps.update(Pants('3rdparty:slf4j-api').resolve())
    if dependencies:
        all_deps.update(dependencies)
    return all_deps
def scan_buildfiles(cls, root_dir, base_path=None):
    """Looks for all BUILD files under base_path (root_dir when no base_path is given)."""
    scan_root = base_path if base_path else root_dir
    found = OrderedSet()
    for dirpath, _, filenames in os.walk(scan_root):
        for filename in filenames:
            if BuildFile._is_buildfile_name(filename):
                # BuildFile wants a path relative to root_dir, not to the scan root.
                relpath = os.path.relpath(os.path.join(dirpath, filename), root_dir)
                found.add(BuildFile(root_dir, relpath))
    return found
def get_all_deps():
    """Returns commons-lang, libthrift and slf4j-api deps plus any caller-supplied dependencies."""
    deps = OrderedSet()
    deps.add(Pants('3rdparty:commons-lang').resolve())
    # The thrift jar is added unresolved here, unlike the Pants specs around it.
    deps.add(JarDependency(org='org.apache.thrift',
                           name='libthrift',
                           rev='${thrift.library.version}'))
    deps.add(Pants('3rdparty:slf4j-api').resolve())
    if dependencies:
        deps.update(dependencies)
    return deps
def _parse_buildfiles(self):
    """Maps the command line spec arguments to the BUILD files they point at.

    Returns an OrderedSet of BuildFiles; any unparseable spec is reported via self.error
    with the full traceback.
    """
    buildfiles = OrderedSet()
    for spec in self.args:
        try:
            if self.options.is_directory_list:
                for buildfile in BuildFile.scan_buildfiles(self.root_dir, spec):
                    buildfiles.add(buildfile)
            else:
                buildfiles.add(Address.parse(self.root_dir, spec).buildfile)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit still propagate.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))
    return buildfiles
def __init__(self, name, is_codegen=False, internal_dependencies=None, jar_dependencies=None,
             rev=None):
    """Builds a minimal stand-in target with just the fields the extraction code reads."""
    self.name = name
    self.rev = rev
    self.is_codegen = is_codegen
    # OrderedSet(None) produces an empty set, so the None defaults are safe to pass through.
    self.internal_dependencies = OrderedSet(internal_dependencies)
    self.jar_dependencies = OrderedSet(jar_dependencies)
    self.excludes = []
def _parse_addresses(self):
    """Maps the command line spec arguments to target Addresses.

    Returns an OrderedSet of Addresses; any unparseable spec is reported via self.error
    with the full traceback.
    """
    addresses = OrderedSet()
    for spec in self.args:
        try:
            if self.options.is_directory_list:
                for address in scan_addresses(self.root_dir, spec):
                    addresses.add(address)
            else:
                addresses.add(Address.parse(self.root_dir, spec))
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit still propagate.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))
    return addresses
def __init__(self, name, *dependencies):
    """Creates a JarLibrary proxying the given dependencies.

    name: The name of this module target, addressable via pants via the portion of the spec
        following the colon
    dependencies: one or more JarDependencies this JarLibrary bundles or Pants pointing to
        other JarLibraries
    """
    assert len(dependencies) > 0, "At least one dependency must be specified"
    Target.__init__(self, name, False)
    jars = OrderedSet()
    for dep in dependencies:
        jars.update(dep.resolve()._as_jar_dependencies())
    self.jar_dependencies = jars
def _aggregate(cls, name, provides, deployjar, buildflags, scala_libs):
    """Merges the given scala_libs into a single meta ScalaLibrary carrying all their
    sources, resources, excludes and external jar dependencies."""
    all_deps = OrderedSet()
    all_excludes = OrderedSet()
    all_sources = []
    all_java_sources = []
    all_resources = []
    all_binary_resources = []
    for scala_lib in scala_libs:
        if scala_lib.resolved_dependencies:
            # Only jars with a rev (external jars) are carried into the meta target.
            all_deps.update(dep for dep in scala_lib.jar_dependencies if dep.rev is not None)
        if scala_lib.excludes:
            all_excludes.update(scala_lib.excludes)
        if scala_lib.sources:
            all_sources.extend(scala_lib.sources)
        if scala_lib.java_sources:
            all_java_sources.extend(scala_lib.java_sources)
        if scala_lib.resources:
            all_resources.extend(scala_lib.resources)
        if scala_lib.binary_resources:
            all_binary_resources.extend(scala_lib.binary_resources)

    return ScalaLibrary(name,
                        all_sources,
                        java_sources = all_java_sources,
                        provides = provides,
                        dependencies = all_deps,
                        excludes = all_excludes,
                        resources = all_resources,
                        binary_resources = all_binary_resources,
                        deployjar = deployjar,
                        buildflags = buildflags,
                        is_meta = True)
def __init__(self, root_dir, parser, argv):
    """Parses target specs and optional trailing build args from the command line.

    Specs may be separated from pass-through build args by a '--' sentinel; with no
    sentinel the first arg is the sole spec and any remaining args are build args.
    """
    Command.__init__(self, root_dir, parser, argv)
    if not self.args:
        self.error("A spec argument is required")

    try:
        specs_end = self.args.index('--')
        # Idiomatic slicing replaces the deprecated direct __getslice__ call; a slice end
        # past len(self.args) is clamped, and an empty tail yields [], so this matches the
        # original bounds (including its always-true length guard) exactly.
        self.build_args = self.args[specs_end + 1:]
    except ValueError:
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit still propagate.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        try:
            InternalTarget.check_cycles(target)
        except CycleException as e:
            self.error("Target contains an internal dependency cycle: %s" % e)

        if not target:
            self.error("Target %s does not exist" % address)
        if not target.address.is_meta:
            target.address.is_meta = self.options.is_meta or address.is_meta
        self.targets.add(target)

    self.is_ide = self.options.is_ide
    self.ide_transitivity = self.options.ide_transitivity
def _parse_targets(self, root_dir):
    """Resolves each command line spec to a documentable Target.

    Returns an OrderedSet of Targets; unparseable specs and non-documentable targets are
    reported via self.error.
    """
    targets = OrderedSet()
    for spec in self.args:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit still propagate.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))
        try:
            target = Target.get(address)
        except Exception:
            self.error("Problem parsing target %s: %s" % (address, traceback.format_exc()))
        if not Doc._is_documentable(target):
            self.error("Target: %s is not documentable" % address)
        targets.add(target)
    return targets
class JarLibrary(Target):
    """Serves as a proxy for one or more JarDependencies."""

    def __init__(self, name, *dependencies):
        """Creates a JarLibrary proxying the given dependencies.

        name: The name of this module target, addressable via pants via the portion of the spec
            following the colon
        dependencies: one or more JarDependencies this JarLibrary bundles or Pants pointing to
            other JarLibraries
        """
        assert len(dependencies) > 0, "At least one dependency must be specified"
        Target.__init__(self, name, False)
        jars = OrderedSet()
        for dep in dependencies:
            jars.update(dep.resolve()._as_jar_dependencies())
        self.jar_dependencies = jars

    def _as_jar_dependencies(self):
        """Iterates the bundled jar dependencies."""
        return iter(self.jar_dependencies)
def execute(self):
    """Configures a project from the jvm targets and generates the IDE/ivy files for it."""
    jvm_targets = OrderedSet()
    for target in self.targets:
        if is_jvm(target):
            jvm_targets.add(target)
    if not jvm_targets:
        raise Exception(
            "Only jvm targets currently handled and none found in: %s" % self.targets)

    project = Project(self.project_name, self.options.python, self.root_dir, jvm_targets)
    configured_targets = project.configure()
    ivyfile, ivysettingsfile = self._generate_ivy(configured_targets)
    self._generate_project_files(project, ivyfile, ivysettingsfile)
def extract_target(java_target, is_transitive):
    """Builds a synthetic 'ide' JavaLibrary aggregating the internal and jar deps extracted
    from java_target."""
    meta_target = bang.extract_target(java_target)
    internal_deps, jar_deps = _extract_target(meta_target, is_transitive)

    combined_deps = OrderedSet(internal_deps)
    combined_deps.update(jar_deps)

    # TODO(John Sirois): make an empty source set work in ant/compile.xml
    placeholder_sources = [ '__no_source__' ]

    return JavaLibrary('ide',
                       placeholder_sources,
                       provides = None,
                       dependencies = combined_deps,
                       excludes = meta_target.excludes,
                       resources = None,
                       binary_resources = None,
                       deployjar = False,
                       buildflags = None,
                       is_meta = True)
def __init__(self, name, dependencies, is_meta):
    """Initializes empty dependency bookkeeping, then resolves the given dependencies."""
    Target.__init__(self, name, is_meta)
    # Start empty; update_dependencies populates all three collections.
    self.jar_dependencies = OrderedSet()
    self.internal_dependencies = OrderedSet()
    self.resolved_dependencies = OrderedSet()
    self.update_dependencies(dependencies)
def __init__(self, name, has_python, root_dir, targets):
    """Creates a new, unconfigured, Project based at root_dir and comprised of the sources
    visible to the given targets."""
    self.name = name
    self.root_dir = root_dir
    self.targets = OrderedSet(targets)
    self.has_python = has_python

    # These are populated when configure() runs.
    self.sources = []
    self.resource_extensions = set()
    self.has_scala = False
    self.has_tests = False
    self.extra_checkstyle_suppression_files = []  # Paths relative to the build root.
def check_cycles(cls, internal_target):
    """Validates the given InternalTarget has no circular dependencies.  Raises CycleException
    if it does."""
    # The current DFS path; membership here means we walked back onto an ancestor.
    dep_stack = OrderedSet()

    def descend(internal_dep):
        if internal_dep in dep_stack:
            raise CycleException(dep_stack, internal_dep)
        # Leaf deps (no internal_dependencies attribute) cannot participate in a cycle.
        if hasattr(internal_dep, 'internal_dependencies'):
            dep_stack.add(internal_dep)
            for dep in internal_dep.internal_dependencies:
                descend(dep)
            dep_stack.remove(internal_dep)

    descend(internal_target)
def _aggregate(cls, name, buildflags, scala_tests):
    """Merges the given scala_tests into a single meta ScalaTests target carrying all their
    sources, excludes and external jar dependencies."""
    all_deps = OrderedSet()
    all_excludes = OrderedSet()
    all_sources = []
    for scala_test in scala_tests:
        if scala_test.resolved_dependencies:
            # Only jars with a rev (external jars) are carried into the meta target.
            all_deps.update(dep for dep in scala_test.jar_dependencies if dep.rev is not None)
        if scala_test.excludes:
            all_excludes.update(scala_test.excludes)
        if scala_test.sources:
            all_sources.extend(scala_test.sources)

    return ScalaTests(name,
                      all_sources,
                      dependencies = all_deps,
                      buildflags = buildflags,
                      is_meta = True)
def _aggregate(cls, name, provides, buildflags, java_proto_libs):
    """Merges the given java_proto_libs into a single meta JavaProtobufLibrary carrying all
    their sources, excludes and external jar dependencies."""
    all_sources = []
    all_deps = OrderedSet()
    all_excludes = OrderedSet()
    for java_proto_lib in java_proto_libs:
        if java_proto_lib.sources:
            all_sources.extend(java_proto_lib.sources)
        if java_proto_lib.resolved_dependencies:
            # Only jars with a rev (external jars) are carried into the meta target.
            all_deps.update(dep for dep in java_proto_lib.jar_dependencies
                            if dep.rev is not None)
        if java_proto_lib.excludes:
            all_excludes.update(java_proto_lib.excludes)

    return JavaProtobufLibrary(name,
                               all_sources,
                               provides = provides,
                               dependencies = all_deps,
                               excludes = all_excludes,
                               buildflags = buildflags,
                               is_meta = True)
def _extract_target(meta_target, is_transitive):
    """Walks meta_target's internal dependency graph, grafting codegen targets under a
    synthetic root while rolling the external jars of non-codegen targets up into one set.

    Returns a tuple (internal_dependencies, jar_deps): the codegen targets hung off the
    synthetic root, and the collected external jar dependencies.
    """
    class RootNode(object):
        def __init__(self):
            self.internal_dependencies = OrderedSet()

    root_target = RootNode()

    # Stack of codegen targets enclosing the current walk position; the head adopts any
    # codegen target discovered beneath it.
    codegen_graph = deque([])
    codegen_graph.appendleft(root_target)
    jar_deps = OrderedSet()

    visited = set()

    def sift_targets(target):
        if target not in visited:
            visited.add(target)
            if target.is_codegen:
                # Graft the codegen target under the nearest enclosing codegen node (or root).
                codegen_graph[0].internal_dependencies.add(target)
            else:
                # Roll up this target's external jars, copying to an intransitive jar when the
                # predicate says the target should not contribute transitively.
                for jar_dependency in target.jar_dependencies:
                    if jar_dependency.rev:
                        if is_transitive(target):
                            jar_deps.add(jar_dependency)
                        else:
                            jar_deps.add(copy(jar_dependency).intransitive())

            if target.is_codegen:
                codegen_graph.appendleft(target)

            # Iterate over a snapshot since the walk mutates internal_dependencies.
            for internal_target in list(target.internal_dependencies):
                target.internal_dependencies.discard(internal_target)
                sift_targets(internal_target)

            if target.is_codegen:
                codegen_graph.popleft()

    sift_targets(meta_target)

    assert len(codegen_graph) == 1 and codegen_graph[0] == root_target,\
      "Unexpected walk: %s" % codegen_graph

    return codegen_graph.popleft().internal_dependencies, jar_deps
def __init__(self, name, dependencies, is_meta):
    """Resolves the given dependencies, partitioning internal targets from the jars they carry."""
    Target.__init__(self, name, is_meta)
    self.resolved_dependencies = OrderedSet()
    self.internal_dependencies = OrderedSet()
    self.jar_dependencies = OrderedSet()
    for dependency in (dependencies or ()):
        resolved = dependency.resolve()
        self.resolved_dependencies.add(resolved)
        if isinstance(resolved, InternalTarget):
            self.internal_dependencies.add(resolved)
        self.jar_dependencies.update(resolved._as_jar_dependencies())
def execute(self): print "Build operating on targets: %s" % self.targets jvm_targets = OrderedSet() python_targets = OrderedSet() for target in self.targets: if is_jvm(target): jvm_targets.add(target) elif is_python(target): python_targets.add(target) else: self.error("Cannot build target %s" % target) if jvm_targets: status = self._jvm_build(jvm_targets) if status != 0: return status if python_targets: status = self._python_build(python_targets) return status
def __init__(self, root_dir, parser, argv):
    """Parses target specs and optional trailing build args from the command line.

    Specs may be separated from pass-through build args by a '--' sentinel; with no
    sentinel the first arg is the sole spec and any remaining args are build args.
    """
    Command.__init__(self, root_dir, parser, argv)
    if not self.args:
        self.error("A spec argument is required")

    try:
        specs_end = self.args.index('--')
        # Idiomatic slicing replaces the deprecated direct __getslice__ call; a slice end
        # past len(self.args) is clamped, and an empty tail yields [], so this matches the
        # original bounds (including its always-true length guard) exactly.
        self.build_args = self.args[specs_end + 1:]
    except ValueError:
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit still propagate.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        try:
            InternalTarget.check_cycles(target)
        except CycleException as e:
            self.error("Target contains an internal dependency cycle: %s" % e)

        if not target:
            self.error("Target %s does not exist" % address)
        if not target.address.is_meta:
            target.address.is_meta = self.options.is_meta or address.is_meta
        self.targets.add(target)

    self.is_ide = self.options.is_ide
    self.ide_transitivity = self.options.ide_transitivity
def sort_targets(cls, internal_targets):
    """Returns a list of targets that internal_targets depend on sorted from most dependent
    to least."""
    roots = OrderedSet()
    inverted_deps = collections.defaultdict(OrderedSet)  # target -> dependent targets
    visited = set()

    def invert(target):
        # Builds the reverse edges; targets with no internal deps are the sort roots.
        if target not in visited:
            visited.add(target)
            if target.internal_dependencies:
                for internal_dependency in target.internal_dependencies:
                    if isinstance(internal_dependency, InternalTarget):
                        inverted_deps[internal_dependency].add(target)
                        invert(internal_dependency)
            else:
                roots.add(target)

    for internal_target in internal_targets:
        invert(internal_target)

    sorted = []
    visited.clear()  # Reused for the second (topological) pass.

    def topological_sort(target):
        if target not in visited:
            visited.add(target)
            if target in inverted_deps:
                for dep in inverted_deps[target]:
                    topological_sort(dep)
            sorted.append(target)

    for root in roots:
        topological_sort(root)

    return sorted
def extract_target(java_targets, is_transitive, name=None):
    """Builds a synthetic 'ide' JavaLibrary aggregating the internal and jar deps extracted
    from the given java_targets."""
    meta_target = bang.extract_target(java_targets, name)
    internal_deps, jar_deps = _extract_target(meta_target, is_transitive)

    combined_deps = OrderedSet(internal_deps)
    combined_deps.update(jar_deps)

    # TODO(John Sirois): make an empty source set work in ant/compile.xml
    return JavaLibrary('ide',
                       ['__no_source__'],
                       provides=None,
                       dependencies=combined_deps,
                       excludes=meta_target.excludes,
                       resources=None,
                       binary_resources=None,
                       deployjar=False,
                       buildflags=None,
                       is_meta=True)
class Build(Command):
    """Builds a specified target."""

    def setup_parser(self, parser):
        """Registers the build command's options and usage text on the given option parser."""
        parser.set_usage("\n"
                         " %prog build (options) [spec] (build args)\n"
                         " %prog build (options) [spec]... -- (build args)")
        parser.disable_interspersed_args()
        parser.add_option("--fast", action="store_true", dest = "is_meta", default = False,
                          help = "Specifies the build should be flattened before executing, this can "
                                 "help speed up many builds. Equivalent to the ! suffix BUILD target "
                                 "modifier")
        parser.add_option("--ide", action="store_true", dest = "is_ide", default = False,
                          help = "Specifies the build should just do enough to get an IDE usable.")
        parser.add_option("-t", "--ide-transitivity", dest = "ide_transitivity", type = "choice",
                          choices = _TRANSITIVITY_CHOICES, default = TRANSITIVITY_TESTS,
                          help = "[%%default] Specifies IDE dependencies should be transitive for one "
                                 "of: %s" % _TRANSITIVITY_CHOICES)
        parser.add_option("-q", "--quiet", action="store_true", dest = "quiet", default = False,
                          help = "Don't output result of empty targets")
        parser.epilog = """Builds the specified target(s). Currently any additional arguments are
passed straight through to the ant build system."""

    def __init__(self, root_dir, parser, argv):
        """Parses target specs and optional trailing build args (separated by '--')."""
        Command.__init__(self, root_dir, parser, argv)

        if not self.args:
            self.error("A spec argument is required")

        try:
            specs_end = self.args.index('--')
            if len(self.args) > specs_end:
                self.build_args = self.args.__getslice__(specs_end + 1, len(self.args) + 1)
            else:
                self.build_args = []
        except ValueError:
            # No '--' sentinel: the first arg is the sole spec, the rest are build args.
            specs_end = 1
            self.build_args = self.args[1:] if len(self.args) > 1 else []

        self.targets = OrderedSet()
        for spec in self.args.__getslice__(0, specs_end):
            try:
                address = Address.parse(root_dir, spec)
            except:
                self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

            try:
                target = Target.get(address)
            except:
                self.error("Problem parsing BUILD target %s: %s" % (address,
                                                                    traceback.format_exc()))

            try:
                InternalTarget.check_cycles(target)
            except CycleException as e:
                self.error("Target contains an internal dependency cycle: %s" % e)

            if not target:
                self.error("Target %s does not exist" % address)

            # The command line --fast flag or a spec's ! suffix can force meta treatment.
            if not target.address.is_meta:
                target.address.is_meta = self.options.is_meta or address.is_meta
            self.targets.add(target)

        self.is_ide = self.options.is_ide
        self.ide_transitivity = self.options.ide_transitivity

    def execute(self):
        """Partitions targets by runtime and dispatches to the jvm and python builders."""
        print "Build operating on targets: %s" % self.targets

        jvm_targets = OrderedSet()
        python_targets = OrderedSet()
        for target in self.targets:
            if is_jvm(target):
                jvm_targets.add(target)
            elif is_python(target):
                python_targets.add(target)
            else:
                self.error("Cannot build target %s" % target)

        if jvm_targets:
            status = self._jvm_build(jvm_targets)
            if status != 0:
                return status

        if python_targets:
            status = self._python_build(python_targets)

        return status

    def _jvm_build(self, targets):
        """Runs the ant backend over the given jvm targets, returning its exit status."""
        try:
            # TODO(John Sirois): think about moving away from the ant backend
            executor = AntBuilder(self.error, self.root_dir, self.is_ide, self.ide_transitivity)
            if self.options.quiet:
                self.build_args.insert(0, "-logger")
                self.build_args.insert(1, "org.apache.tools.ant.NoBannerLogger")
                self.build_args.insert(2, "-q")
            return executor.build(targets, self.build_args)
        except:
            self.error("Problem executing AntBuilder for targets %s: %s" % (
                targets, traceback.format_exc()))

    def _python_build(self, targets):
        """Runs the python backend over the given python targets, returning its exit status."""
        try:
            executor = PythonBuilder(self.error, self.root_dir)
            return executor.build(targets, self.build_args)
        except:
            self.error("Problem executing PythonBuilder for targets %s: %s" % (
                targets, traceback.format_exc()))
def __init__(self):
    # Collects the codegen targets grafted directly under this synthetic root node.
    self.internal_dependencies = OrderedSet()
class InternalTarget(Target):
    """A baseclass for targets that support an optional dependency set."""

    def sort(self):
        """Returns a list of targets this target depends on sorted from most dependent to least."""
        roots = dict()  # address -> root target
        inverted_deps = collections.defaultdict(OrderedSet)  # address -> dependent targets
        visited = set()  # addresses

        def invert(target):
            # Builds the reverse edges; targets with no internal deps are the sort roots.
            if target.address not in visited:
                visited.add(target.address)
                if target.internal_dependencies:
                    for internal_dependency in target.internal_dependencies:
                        if isinstance(internal_dependency, InternalTarget):
                            inverted_deps[internal_dependency.address].add(target)
                            invert(internal_dependency)
                else:
                    roots[target.address] = target

        invert(self)

        sorted = []
        visited.clear()  # Reused for the second (topological) pass.

        def topological_sort(target):
            if target.address not in visited:
                visited.add(target.address)
                if target.address in inverted_deps:
                    for dep in inverted_deps[target.address]:
                        topological_sort(dep)
                sorted.append(target)

        for root in roots.values():
            topological_sort(root)

        return sorted

    def coalesce(self):
        """Returns a list of targets this target depends on sorted from most dependent to least
        and grouped where possible by target type."""
        sorted_targets = self.sort()

        # can do no better for any of these:
        # []
        # [a]
        # [a,b]
        if len(sorted_targets) <= 2:
            return sorted_targets

        # For these, we'd like to coalesce if possible, like:
        # [a,b,a,c,a,c] -> [a,a,a,b,c,c]
        # adopt a quadratic worst case solution, when we find a type change edge, scan forward for
        # the opposite edge and then try to swap dependency pairs to move the type back left to its
        # grouping.  If the leftwards migration fails due to a dependency constraint, we just stop
        # and move on leaving "type islands".
        current_type = type(self)

        # main scan left to right no backtracking
        for i in range(len(sorted_targets) - 1):
            current_target = sorted_targets[i]
            if current_type != type(current_target):
                scanned_back = False

                # scan ahead for next type match
                for j in range(i + 1, len(sorted_targets)):
                    look_ahead_target = sorted_targets[j]
                    if current_type == type(look_ahead_target):
                        scanned_back = True

                        # swap this guy as far back as we can
                        for k in range(j, i, -1):
                            previous_target = sorted_targets[k - 1]
                            mismatching_types = current_type != type(previous_target)
                            not_a_dependency = look_ahead_target not in previous_target.internal_dependencies
                            # A swap is only legal when it does not move a target ahead of one
                            # of its own dependencies.
                            if mismatching_types and not_a_dependency:
                                sorted_targets[k] = sorted_targets[k - 1]
                                sorted_targets[k - 1] = look_ahead_target
                            else:
                                break  # out of k

                        break  # out of j

                if not scanned_back:  # done with coalescing the current type, move on to next
                    current_type = type(current_target)

        return sorted_targets

    def __init__(self, name, dependencies, is_meta):
        """Resolves dependencies, partitioning internal targets from the jars they carry."""
        Target.__init__(self, name, is_meta)
        self.resolved_dependencies = OrderedSet()
        self.internal_dependencies = OrderedSet()
        self.jar_dependencies = OrderedSet()
        if dependencies:
            for dependency in dependencies:
                resolved_dependency = dependency.resolve()
                self.resolved_dependencies.add(resolved_dependency)
                if isinstance(resolved_dependency, InternalTarget):
                    self.internal_dependencies.add(resolved_dependency)
                self.jar_dependencies.update(resolved_dependency._as_jar_dependencies())
def get_all_deps():
    """Returns the junit dependency plus any caller-supplied dependencies."""
    junit = Pants('3rdparty:junit').resolve()
    combined = OrderedSet()
    combined.update(junit)
    if dependencies:
        combined.update(dependencies)
    return combined
def __init__(self, target_base, name, sources, dependencies=None, is_meta=False):
    """Resolves the given sources against target_base and records any dependencies."""
    TargetWithSources.__init__(self, target_base, name, is_meta)
    self.sources = self._resolve_paths(target_base, sources)
    if dependencies:
        self.dependencies = dependencies
    else:
        self.dependencies = OrderedSet()
def configure(self):
    """Configures this project's source sets returning the full set of targets the project is
    comprised of.  The full set can be larger than the initial set of targets when any of the
    initial targets only has partial ownership of its parent directory source set."""
    analyzed = OrderedSet()
    targeted = set()  # Absolute source directories already claimed by a SourceSet.

    def accept_target(target):
        return has_sources(target) and not target.is_codegen

    def configure_source_sets(relative_base, sources, is_test):
        # Registers one SourceSet per distinct source directory not yet targeted.
        absolute_base = os.path.join(self.root_dir, relative_base)
        paths = set([ os.path.dirname(source) for source in sources ])
        for path in paths:
            absolute_path = os.path.join(absolute_base, path)
            if absolute_path not in targeted:
                targeted.add(absolute_path)
                self.sources.append(SourceSet(self.root_dir, relative_base, path, is_test))

    def configure_target(target):
        if target not in analyzed:
            analyzed.add(target)
            self.has_scala = self.has_scala or is_scala(target)
            if isinstance(target, JavaLibrary) or isinstance(target, ScalaLibrary):
                # TODO(John Sirois): this does not handle test resources, make test resources 1st class
                # in ant build and punch this through to pants model
                resources = set()
                if target.resources:
                    resources.update(target.resources)
                if target.binary_resources:
                    resources.update(target.binary_resources)
                if resources:
                    self.resource_extensions.update(Project.extract_resource_extensions(resources))
                    configure_source_sets(RESOURCES_BASE_DIR, resources, is_test = False)

            if target.sources:
                test = is_test(target)
                self.has_tests = self.has_tests or test
                configure_source_sets(target.target_base, target.sources, is_test = test)

            # Walking a sibling target pulls in the rest of a partially-owned source set.
            siblings = Target.get_all_addresses(target.address.buildfile)
            return filter(accept_target, [ Target.get(a) for a in siblings
                                           if a != target.address ])

    for target in self.targets:
        target.walk(configure_target, predicate = accept_target)

    for source_set in self.sources:
        paths = set()
        source_base = os.path.join(self.root_dir, source_set.source_base)
        for root, dirs, _ in os.walk(os.path.join(source_base, source_set.path)):
            if dirs:
                paths.update([ os.path.join(root, dir) for dir in dirs ])
        # Exclude child directories that no SourceSet claimed, unless none were claimed.
        unused_children = paths - targeted
        if unused_children and paths != unused_children:
            source_set.excludes.extend(os.path.relpath(child, source_base)
                                       for child in unused_children)

    targets = OrderedSet()
    for target in self.targets:
        target.walk(lambda target: targets.add(target), has_sources)
    targets.update(analyzed - targets)
    return targets
def configure(self):
    """Configures this project's source sets returning the full set of targets the project is
    comprised of.  The full set can be larger than the initial set of targets when any of the
    initial targets only has partial ownership of its parent directory source set."""
    analyzed = OrderedSet()
    targeted = set()  # Absolute source directories already claimed by a SourceSet.

    def accept_target(target):
        return has_sources(target) and not target.is_codegen

    def configure_source_sets(relative_base, sources, is_test):
        # Registers one SourceSet per distinct source directory not yet targeted.
        absolute_base = os.path.join(self.root_dir, relative_base)
        paths = set([os.path.dirname(source) for source in sources])
        for path in paths:
            absolute_path = os.path.join(absolute_base, path)
            if absolute_path not in targeted:
                targeted.add(absolute_path)
                self.sources.append(
                    SourceSet(self.root_dir, relative_base, path, is_test))

    def configure_target(target):
        if target not in analyzed:
            analyzed.add(target)
            self.has_scala = self.has_scala or is_scala(target)
            if isinstance(target, JavaLibrary) or isinstance(target, ScalaLibrary):
                # TODO(John Sirois): this does not handle test resources, make test resources 1st class
                # in ant build and punch this through to pants model
                resources = set()
                if target.resources:
                    resources.update(target.resources)
                if target.binary_resources:
                    resources.update(target.binary_resources)
                if resources:
                    self.resource_extensions.update(
                        Project.extract_resource_extensions(resources))
                    configure_source_sets(RESOURCES_BASE_DIR, resources, is_test=False)

            if target.sources:
                test = is_test(target)
                self.has_tests = self.has_tests or test
                configure_source_sets(target.target_base, target.sources, is_test=test)

            # Walking a sibling target pulls in the rest of a partially-owned source set.
            siblings = Target.get_all_addresses(target.address.buildfile)
            return filter(
                accept_target,
                [Target.get(a) for a in siblings if a != target.address])

    for target in self.targets:
        target.walk(configure_target, predicate=accept_target)

    for source_set in self.sources:
        paths = set()
        source_base = os.path.join(self.root_dir, source_set.source_base)
        for root, dirs, _ in os.walk(os.path.join(source_base, source_set.path)):
            if dirs:
                paths.update([os.path.join(root, dir) for dir in dirs])
        # Exclude child directories that no SourceSet claimed, unless none were claimed.
        unused_children = paths - targeted
        if unused_children and paths != unused_children:
            source_set.excludes.extend(
                os.path.relpath(child, source_base) for child in unused_children)

    targets = OrderedSet()
    for target in self.targets:
        target.walk(lambda target: targets.add(target), has_sources)
    targets.update(analyzed - targets)
    return targets
class InternalTarget(Target):
    """A baseclass for targets that support an optional dependency set."""

    @classmethod
    def check_cycles(cls, internal_target):
        """Validates the given InternalTarget has no circular dependencies.  Raises
        CycleException if it does."""
        # The current DFS path; membership here means we walked back onto an ancestor.
        dep_stack = OrderedSet()

        def descend(internal_dep):
            if internal_dep in dep_stack:
                raise CycleException(dep_stack, internal_dep)
            if hasattr(internal_dep, 'internal_dependencies'):
                dep_stack.add(internal_dep)
                for dep in internal_dep.internal_dependencies:
                    descend(dep)
                dep_stack.remove(internal_dep)

        descend(internal_target)

    @classmethod
    def sort_targets(cls, internal_targets):
        """Returns a list of targets that internal_targets depend on sorted from most dependent
        to least."""
        roots = OrderedSet()
        inverted_deps = collections.defaultdict(OrderedSet)  # target -> dependent targets
        visited = set()

        def invert(target):
            # Builds the reverse edges; targets with no internal deps are the sort roots.
            if target not in visited:
                visited.add(target)
                if target.internal_dependencies:
                    for internal_dependency in target.internal_dependencies:
                        if isinstance(internal_dependency, InternalTarget):
                            inverted_deps[internal_dependency].add(target)
                            invert(internal_dependency)
                else:
                    roots.add(target)

        for internal_target in internal_targets:
            invert(internal_target)

        sorted = []
        visited.clear()  # Reused for the second (topological) pass.

        def topological_sort(target):
            if target not in visited:
                visited.add(target)
                if target in inverted_deps:
                    for dep in inverted_deps[target]:
                        topological_sort(dep)
                sorted.append(target)

        for root in roots:
            topological_sort(root)

        return sorted

    @classmethod
    def coalesce_targets(cls, internal_targets):
        """Returns a list of targets internal_targets depend on sorted from most dependent to
        least and grouped where possible by target type."""
        sorted_targets = InternalTarget.sort_targets(internal_targets)

        # can do no better for any of these:
        # []
        # [a]
        # [a,b]
        if len(sorted_targets) <= 2:
            return sorted_targets

        # For these, we'd like to coalesce if possible, like:
        # [a,b,a,c,a,c] -> [a,a,a,b,c,c]
        # adopt a quadratic worst case solution, when we find a type change edge, scan forward
        # for the opposite edge and then try to swap dependency pairs to move the type back left
        # to its grouping.  If the leftwards migration fails due to a dependency constraint, we
        # just stop and move on leaving "type islands".
        current_type = None

        # main scan left to right no backtracking
        for i in range(len(sorted_targets) - 1):
            current_target = sorted_targets[i]
            if current_type != type(current_target):
                scanned_back = False

                # scan ahead for next type match
                for j in range(i + 1, len(sorted_targets)):
                    look_ahead_target = sorted_targets[j]
                    if current_type == type(look_ahead_target):
                        scanned_back = True

                        # swap this guy as far back as we can
                        for k in range(j, i, -1):
                            previous_target = sorted_targets[k - 1]
                            mismatching_types = current_type != type(previous_target)
                            not_a_dependency = look_ahead_target not in previous_target.internal_dependencies
                            # A swap is only legal when it does not move a target ahead of one
                            # of its own dependencies.
                            if mismatching_types and not_a_dependency:
                                sorted_targets[k] = sorted_targets[k - 1]
                                sorted_targets[k - 1] = look_ahead_target
                            else:
                                break  # out of k

                        break  # out of j

                if not scanned_back:  # done with coalescing the current type, move on to next
                    current_type = type(current_target)

        return sorted_targets

    def sort(self):
        """Returns a list of targets this target depends on sorted from most dependent to
        least."""
        return InternalTarget.sort_targets([ self ])

    def coalesce(self):
        """Returns a list of targets this target depends on sorted from most dependent to least
        and grouped where possible by target type."""
        return InternalTarget.coalesce_targets([ self ])

    def __init__(self, name, dependencies, is_meta):
        """Initializes empty dependency bookkeeping, then resolves the given dependencies."""
        Target.__init__(self, name, is_meta)
        self.resolved_dependencies = OrderedSet()
        self.internal_dependencies = OrderedSet()
        self.jar_dependencies = OrderedSet()
        self.update_dependencies(dependencies)

    def update_dependencies(self, dependencies):
        """Resolves dependencies, partitioning internal targets from the jars they carry."""
        if dependencies:
            for dependency in dependencies:
                # resolve() yields one or more resolved targets here (cf. the iteration).
                for resolved_dependency in dependency.resolve():
                    self.resolved_dependencies.add(resolved_dependency)
                    if isinstance(resolved_dependency, InternalTarget):
                        self.internal_dependencies.add(resolved_dependency)
                    self.jar_dependencies.update(resolved_dependency._as_jar_dependencies())

    def _walk(self, walked, work, predicate = None):
        """Extends Target._walk to also visit resolved dependencies (and any additional
        targets the work function returns for them)."""
        Target._walk(self, walked, work, predicate)
        for dep in self.resolved_dependencies:
            if isinstance(dep, Target) and not dep in walked:
                walked.add(dep)
                if not predicate or predicate(dep):
                    additional_targets = work(dep)
                    dep._walk(walked, work, predicate)
                    if additional_targets:
                        for additional_target in additional_targets:
                            additional_target._walk(walked, work, predicate)