Example #1
def extract_target(java_targets, is_transitive, is_classpath, name = None):
  meta_target = bang.extract_target(java_targets, name)

  internal_deps, jar_deps = _extract_target(meta_target, is_transitive, is_classpath)

  # TODO(John Sirois): make an empty source set work in ant/compile.xml
  sources = [ '__no_source__' ]

  all_deps = OrderedSet()
  all_deps.update(internal_deps)
  all_deps.update(jar_deps)

  if is_java(meta_target):
    return JavaLibrary('ide',
                       sources,
                       dependencies = all_deps,
                       excludes = meta_target.excludes,
                       is_meta = True)
  elif is_scala(meta_target):
    return ScalaLibrary('ide',
                        sources,
                        dependencies = all_deps,
                        excludes = meta_target.excludes,
                        is_meta = True)
  else:
    raise TypeError("Cannot generate IDE configuration for targets: %s" % java_targets)
Example #2
  def console_output(self, _):
    buildfiles = OrderedSet()
    if self._dependees_type:
      base_paths = OrderedSet()
      for dependees_type in self._dependees_type:
        # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
        # machinery
        try:
          # Try a fully qualified import first, for filtering on custom types.
          from_list, module, type_name = dependees_type.rsplit('.', 2)
          module = __import__('%s.%s' % (from_list, module), fromlist=[from_list])
          target_type = getattr(module, type_name)
        except (ImportError, ValueError):
          # Fall back on pants provided target types.
          registered_aliases = self.context.build_file_parser.registered_aliases()
          if dependees_type not in registered_aliases.targets:
            raise TaskError('Invalid type name: %s' % dependees_type)
          target_type = registered_aliases.targets[dependees_type]

        # Try to find the SourceRoot for the given input type
        try:
          roots = SourceRoot.roots(target_type)
          base_paths.update(roots)
        except KeyError:
          pass

      if not base_paths:
        raise TaskError('No SourceRoot set for any target type in %s.' % self._dependees_type +
                        '\nPlease define a source root in BUILD file as:' +
                        '\n\tsource_root(\'<src-folder>\', %s)' % ', '.join(self._dependees_type))
      for base_path in base_paths:
        buildfiles.update(BuildFile.scan_buildfiles(get_buildroot(),
                                                    os.path.join(get_buildroot(), base_path)))
    else:
      buildfiles = BuildFile.scan_buildfiles(get_buildroot())

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
      build_file_parser.parse_build_file(build_file)
      for address in build_file_parser.addresses_by_build_file[build_file]:
        build_file_parser.inject_spec_closure_into_build_graph(address.spec, build_graph)
      for address in build_file_parser.addresses_by_build_file[build_file]:
        target = build_graph.get_target(address)
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        for dependency in target.dependencies:
          dependency = self.get_concrete_target(dependency)
          dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
      for root in roots:
        yield root.address.spec

    for dependant in self.get_dependants(dependees_by_target, roots):
      yield dependant.address.spec
Example #3
  def minimum_path(cls):
    """
      Return as a tuple the emulated sys.path and sys.path_importer_cache of
      a bare python installation, a la python -S.
    """
    site_libs = set(cls._site_libs())
    for site_lib in site_libs:
      TRACER.log('Found site-library: %s' % site_lib)
    for extras_path in cls._extras_paths():
      TRACER.log('Found site extra: %s' % extras_path)
      site_libs.add(extras_path)
    site_libs = set(os.path.normpath(path) for path in site_libs)

    site_distributions = OrderedSet()
    for path_element in sys.path:
      if any(path_element.startswith(site_lib) for site_lib in site_libs):
        TRACER.log('Inspecting path element: %s' % path_element)
        site_distributions.update(dist.location for dist in find_distributions(path_element))

    user_site_distributions = OrderedSet(dist.location for dist in find_distributions(USER_SITE))

    for path in site_distributions:
      TRACER.log('Scrubbing from site-packages: %s' % path)
    for path in user_site_distributions:
      TRACER.log('Scrubbing from user site: %s' % path)

    scrub_paths = site_distributions | user_site_distributions
    scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
    # Materialize the filter: a lazy iterator would be exhausted by the repeated
    # membership tests below (Python 3).
    scrub_from_importer_cache = set(filter(
      lambda key: any(key.startswith(path) for path in scrub_paths),
      sys.path_importer_cache.keys()))
    scrubbed_importer_cache = dict((key, value) for (key, value) in sys.path_importer_cache.items()
      if key not in scrub_from_importer_cache)
    return scrubbed_sys_path, scrubbed_importer_cache
Example #4
  def _format_args_for_target(self, target, target_workdir):
    """Calculate the arguments to pass to the command line for a single target."""

    relative_proto_files = OrderedSet()
    if target.payload.proto_files:
      relative_proto_files.update(target.payload.proto_files)
    else:
      sources = OrderedSet(target.sources_relative_to_buildroot())
      if not self.validate_sources_present(sources, [target]):
        return None
      # Compute the source path relative to the 'source root' which is the path used at the
      # root of imports
      for source in sources:
        source_root = self.context.source_roots.find_by_path(source).path
        relative_proto_files.add(os.path.relpath(source, source_root))

    args = ['--generated-source-directory', target_workdir]

    for root in target.payload.roots:
      args.extend(['--root', root])

    for path in self._calculate_proto_paths(target):
      # NB(gmalmquist): This isn't a typo. The --source argument is actually a proto path.
      args.extend(['--source', path])

    for source in relative_proto_files:
      args.extend(['--proto', source])

    return args
Example #5
  def parse_args(args):
    goals = OrderedSet()
    specs = OrderedSet()
    help = False
    explicit_multi = False

    def is_spec(spec):
      return os.sep in spec or ':' in spec

    for i, arg in enumerate(args):
      help = help or 'help' == arg
      if not arg.startswith('-'):
        if is_spec(arg):
          specs.add(arg)
        else:
          goals.add(arg)
      elif '--' == arg:
        if specs:
          raise GoalError('Cannot intermix targets with goals when using --. Targets should '
                          'appear on the right')
        explicit_multi = True
        del args[i]
        break

    if explicit_multi:
      spec_offset = len(goals) + 1 if help else len(goals)
      specs.update(arg for arg in args[spec_offset:] if not arg.startswith('-'))

    return goals, specs
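For orientation, a hedged sketch of how parse_args splits a hypothetical command line (the argument values are made up for illustration):

# Illustrative only: splitting goals from target specs around a '--' separator.
# goals, specs = parse_args(['compile', 'test', '--', 'src/java:lib', '-v'])
# goals == OrderedSet(['compile', 'test'])
# specs == OrderedSet(['src/java:lib'])   # '-v' is dropped: it starts with '-'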
Example #6
  def wrapper(func):
    if not inspect.isfunction(func):
      raise ValueError('The @rule decorator must be applied innermost of all decorators.')

    caller_frame = inspect.stack()[1][0]
    module_ast = ast.parse(inspect.getsource(func))

    def resolve_type(name):
      resolved = caller_frame.f_globals.get(name) or caller_frame.f_builtins.get(name)
      if not isinstance(resolved, (type, Exactly)):
        raise ValueError('Expected either a `type` constructor or `Exactly` instance; '
                         'got: {}'.format(name))
      return resolved

    gets = OrderedSet()
    for node in ast.iter_child_nodes(module_ast):
      if isinstance(node, ast.FunctionDef) and node.name == func.__name__:
        rule_visitor = _RuleVisitor()
        rule_visitor.visit(node)
        gets.update(Get(resolve_type(p), resolve_type(s)) for p, s in rule_visitor.gets)

    func._rule = TaskRule(output_type, input_selectors, func, input_gets=list(gets))
    func.output_type = output_type
    func.goal = for_goal
    return func
Example #7
 def get_jars_for_ivy_module(self, jar):
   ref = IvyModuleRef(jar.org, jar.name, jar.rev)
   deps = OrderedSet()
   for dep in self.deps_by_caller.get(ref, []):
     deps.add(dep)
     deps.update(self.get_jars_for_ivy_module(dep))
   return deps
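Note that get_jars_for_ivy_module assumes the resolved Ivy graph is acyclic; a cycle in deps_by_caller would recurse without bound. Below is a self-contained sketch of the same traversal hardened with a visited set, using a plain dict in place of the Ivy structures (all names here are illustrative):

def transitive_deps(deps_by_caller, ref):
  """Collect everything reachable from ref, guarding against cycles."""
  seen = []            # discovery order, like the OrderedSet above
  visited = set()
  stack = [ref]
  while stack:
    node = stack.pop()
    if node in visited:
      continue
    visited.add(node)
    seen.append(node)
    # Reverse so children are expanded in their declared order.
    stack.extend(reversed(deps_by_caller.get(node, [])))
  return seen[1:]  # exclude the root itself, as get_jars_for_ivy_module does

graph = {'a': ['b', 'c'], 'b': ['c'], 'c': ['a']}  # note the a -> c -> a cycle
print(transitive_deps(graph, 'a'))  # ['b', 'c'] -- terminates despite the cycle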
Example #8
  def execute_codegen(self, target, target_workdir):
    sources_by_base = self._calculate_sources(target)
    sources = target.sources_relative_to_buildroot()

    bases = OrderedSet(sources_by_base.keys())
    bases.update(self._proto_path_imports([target]))

    gen_flag = '--java_out'

    gen = '{0}={1}'.format(gen_flag, target_workdir)

    args = [self.protobuf_binary, gen]

    if self.plugins:
      for plugin in self.plugins:
        args.append("--{0}_out={1}".format(plugin, target_workdir))

    for base in bases:
      args.append('--proto_path={0}'.format(base))

    args.extend(sources)

    # Tack on extra path entries. These can be used to find protoc plugins
    protoc_environ = os.environ.copy()
    if self._extra_paths:
      protoc_environ['PATH'] = os.pathsep.join(self._extra_paths
                                               + protoc_environ['PATH'].split(os.pathsep))

    self.context.log.debug('Executing: {0}'.format('\\\n  '.join(args)))
    process = subprocess.Popen(args, env=protoc_environ)
    result = process.wait()
    if result != 0:
      raise TaskError('{0} ... exited non-zero ({1})'.format(self.protobuf_binary, result))
Example #9
 def _flatten_type_constraints(self, selection_products):
   type_constraints = filter(lambda o: isinstance(o, Exactly), selection_products)
   non_type_constraints = filter(lambda o: not isinstance(o, Exactly), selection_products)
   flattened_products = OrderedSet(non_type_constraints)
   for t in type_constraints:
     flattened_products.update(t.types)
   return flattened_products
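Exactly appears to be a pants TypeConstraint exposing its admissible types via a .types attribute (that is all the code above relies on). A stand-in sketch with a hypothetical FakeExactly shows the flattening order: non-constraint products first, then each constraint expanded into its member types:

class FakeExactly(object):
  """Hypothetical stand-in for the Exactly type constraint."""
  def __init__(self, *types):
    self.types = types

products = [int, FakeExactly(str, bytes), float]
non_constraints = [o for o in products if not isinstance(o, FakeExactly)]
constraints = [o for o in products if isinstance(o, FakeExactly)]
flattened = list(non_constraints)
for t in constraints:
  flattened.extend(t.types)
print(flattened)  # [int, float, str, bytes]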
Example #10
  def _aggregate(cls, name, provides, apt_libs):
    all_deps = OrderedSet()
    all_excludes = OrderedSet()
    all_sources = []
    all_resources = []
    all_binary_resources = []
    all_annotation_processors = []

    for apt_lib in apt_libs:
      if apt_lib.resolved_dependencies:
        all_deps.update(dep for dep in apt_lib.jar_dependencies if dep.rev is not None)
      if apt_lib.excludes:
        all_excludes.update(apt_lib.excludes)
      if apt_lib.sources:
        all_sources.extend(apt_lib.sources)
      if apt_lib.resources:
        all_resources.extend(apt_lib.resources)
      if apt_lib.binary_resources:
        all_binary_resources.extend(apt_lib.binary_resources)
      if apt_lib.processors:
        all_annotation_processors.extend(apt_lib.processors)

    return AnnotationProcessor(name,
                               all_sources,
                               provides = provides,
                               dependencies = all_deps,
                               excludes = all_excludes,
                               resources = all_resources,
                               binary_resources = all_binary_resources,
                               processors = all_annotation_processors,
                               is_meta = True)
Example #11
 def dependents_of_addresses(self, addresses):
   """Given an iterable of addresses, yield all of those addresses dependents."""
   seen = OrderedSet(addresses)
   for address in addresses:
     seen.update(self._dependent_address_map[address])
     seen.update(self._implicit_dependent_address_map[address])
   return seen
Example #12
    def targets(self, predicate=None, postorder=False):
        """Selects targets in-play in this run from the target roots and their transitive dependencies.

    Also includes any new synthetic targets created from the target roots or their transitive
    dependencies during the course of the run.

    :param predicate: If specified, the predicate will be used to narrow the scope of targets
                      returned.
    :param bool postorder: `True` to gather transitive dependencies with a postorder traversal;
                           `False` for a preorder traversal (the default).
    :returns: A list of matching targets.
    """
        target_root_addresses = [target.address for target in self.target_roots]
        target_set = self._collect_targets(self.target_roots, postorder=postorder)

        synthetics = OrderedSet()
        for derived_from, synthetic_targets in self._synthetic_targets.items():
            if derived_from in target_set or derived_from in synthetics:
                synthetics.update(synthetic_targets)

        synthetic_set = self._collect_targets(synthetics, postorder=postorder)

        target_set.update(synthetic_set)

        # Materialize the filter so the documented list return type holds on Python 3.
        return list(filter(predicate, target_set))
Example #13
class RootedProducts(object):
  """File products of a build that have a concept of a 'root' directory.

  E.g., classfiles, under a root package directory."""
  def __init__(self, root):
    self._root = root
    self._rel_paths = OrderedSet()

  def add_abs_paths(self, abs_paths):
    for abs_path in abs_paths:
      if not abs_path.startswith(self._root):
        raise Exception('{} is not under {}'.format(abs_path, self._root))
      self._rel_paths.add(os.path.relpath(abs_path, self._root))

  def add_rel_paths(self, rel_paths):
    self._rel_paths.update(rel_paths)

  def root(self):
    return self._root

  def rel_paths(self):
    return self._rel_paths

  def abs_paths(self):
    for relpath in self._rel_paths:
      yield os.path.join(self._root, relpath)

  def __bool__(self):
    return bool(self._rel_paths)

  __nonzero__ = __bool__
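A brief usage sketch of RootedProducts (the paths are hypothetical; assumes the os and OrderedSet imports the class needs are in scope):

products = RootedProducts('/build/out/classes')
products.add_abs_paths(['/build/out/classes/com/Foo.class'])
products.add_rel_paths(['com/Bar.class'])
print(list(products.rel_paths()))  # ['com/Foo.class', 'com/Bar.class']
print(list(products.abs_paths()))
# ['/build/out/classes/com/Foo.class', '/build/out/classes/com/Bar.class']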
Example #14
def extract_target(java_targets, is_classpath):
  primary_target = InternalTarget.sort_targets(java_targets)[0]

  with ParseContext.temp(primary_target.target_base):
    internal_deps, jar_deps = _extract_target(java_targets, is_classpath)

    # TODO(John Sirois): make an empty source set work in ant/compile.xml
    sources = [ '__no_source__' ]

    all_deps = OrderedSet()
    all_deps.update(internal_deps)
    all_deps.update(jar_deps)

    if is_java(primary_target):
      return JavaLibrary('ide',
                         sources,
                         dependencies = all_deps,
                         is_meta = True)
    elif is_scala(primary_target):
      return ScalaLibrary('ide',
                          sources,
                          dependencies = all_deps,
                          is_meta = True)
    else:
      raise TypeError("Cannot generate IDE configuration for targets: %s" % java_targets)
Example #15
 def minimum_path(cls):
   """
     Return as a tuple the emulated sys.path and sys.path_importer_cache of
     a bare python installation, a la python -S.
   """
   from site import USER_SITE
   from twitter.common.collections import OrderedSet
   from pkg_resources import find_distributions
   from distutils.sysconfig import get_python_lib
   site_libs = set([get_python_lib(plat_specific=False), get_python_lib(plat_specific=True)])
   site_distributions = OrderedSet()
   for path_element in sys.path:
     if any(path_element.startswith(site_lib) for site_lib in site_libs):
       TRACER.log('Inspecting path element: %s' % path_element)
       site_distributions.update(dist.location for dist in find_distributions(path_element))
   user_site_distributions = OrderedSet(dist.location for dist in find_distributions(USER_SITE))
   for path in site_distributions:
     TRACER.log('Scrubbing from site-packages: %s' % path)
   for path in user_site_distributions:
     TRACER.log('Scrubbing from user site: %s' % path)
   scrub_paths = site_distributions | user_site_distributions
   scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
   # Materialize the filter: a lazy iterator would be exhausted by the repeated
   # membership tests below (Python 3).
   scrub_from_importer_cache = set(filter(
     lambda key: any(key.startswith(path) for path in scrub_paths),
     sys.path_importer_cache.keys()))
   scrubbed_importer_cache = dict((key, value) for (key, value) in sys.path_importer_cache.items()
     if key not in scrub_from_importer_cache)
   return scrubbed_sys_path, scrubbed_importer_cache
Example #16
 def create_geninfo(key):
   gen_info = context.config.getdict('thrift-gen', key)
   gen = gen_info['gen']
   deps = OrderedSet()
   for dep in gen_info['deps']:
     deps.update(context.resolve(dep))
   return ThriftGen.GenInfo(gen, deps)
Example #17
  def _aggregate(cls, name, provides, deployjar, buildflags, scala_libs, target_base):
    all_deps = OrderedSet()
    all_excludes = OrderedSet()
    all_sources = []
    all_java_sources = []
    all_resources = []
    all_binary_resources = []

    for scala_lib in scala_libs:
      if scala_lib.resolved_dependencies:
        all_deps.update(dep for dep in scala_lib.jar_dependencies if dep.rev is not None)
      if scala_lib.excludes:
        all_excludes.update(scala_lib.excludes)
      if scala_lib.sources:
        all_sources.extend(scala_lib.sources)
      if scala_lib.java_sources:
        all_java_sources.extend(scala_lib.java_sources)
      if scala_lib.resources:
        all_resources.extend(scala_lib.resources)
      if scala_lib.binary_resources:
        all_binary_resources.extend(scala_lib.binary_resources)

    return ScalaLibrary(name,
                        all_sources,
                        target_base = target_base,
                        java_sources = all_java_sources,
                        provides = provides,
                        dependencies = all_deps,
                        excludes = all_excludes,
                        resources = all_resources,
                        binary_resources = all_binary_resources,
                        deployjar = deployjar,
                        buildflags = buildflags,
                        is_meta = True)
Example #18
 def get_transitive_jars(jar_lib):
   if not ivy_info:
     return OrderedSet()
   transitive_jars = OrderedSet()
   for jar in jar_lib.jar_dependencies:
     transitive_jars.update(ivy_info.get_jars_for_ivy_module(jar))
   return transitive_jars
Example #19
  def ancestors(self):
    """Returns all BUILD files in ancestor directories of this BUILD file's parent directory."""

    def find_parent(dir):
      parent = os.path.dirname(dir)
      for parent_buildfile in BuildFile._get_all_build_files(parent):
        buildfile = os.path.join(parent, parent_buildfile)
        if os.path.exists(buildfile) and not os.path.isdir(buildfile):
          return parent, BuildFile.from_cache(self.root_dir,
                                              os.path.relpath(buildfile, self.root_dir))
      return parent, None

    parent_buildfiles = OrderedSet()

    def is_root(path):
      return os.path.abspath(self.root_dir) == os.path.abspath(path)

    parentdir = os.path.dirname(self.full_path)
    visited = set()
    while parentdir not in visited and not is_root(parentdir):
      visited.add(parentdir)
      parentdir, buildfile = find_parent(parentdir)
      if buildfile:
        parent_buildfiles.update(buildfile.family())

    return parent_buildfiles
Example #20
  def execute(self):
    targets = self.context.targets()
    for conf in self.confs:
      outpath = os.path.join(self.workdir, '%s.%s.provides' %
                             (self.ivy_utils.identify(targets)[1], conf))
      if self.transitive:
        outpath += '.transitive'
      ivyinfo = self.ivy_utils.parse_xml_report(self.context.target_roots, conf)
      jar_paths = OrderedSet()
      for root in self.target_roots:
        jar_paths.update(self.get_jar_paths(ivyinfo, root, conf))

      with open(outpath, 'w') as outfile:
        def do_write(s):
          outfile.write(s)
          if self.also_write_to_stdout:
            sys.stdout.write(s)
        for jar in jar_paths:
          do_write('# from jar %s\n' % jar)
          for line in self.list_jar(jar):
            if line.endswith('.class'):
              class_name = line[:-6].replace('/', '.')
              do_write(class_name)
              do_write('\n')
      print('Wrote provides information to %s' % outpath)
Example #21
  def to_jar_dependencies(relative_to, jar_library_specs, build_graph):
    """Convenience method to resolve a list of specs to JarLibraries and return its jars attributes.

    Expects that the jar_libraries are declared relative to this target.

    :API: public

    :param Address relative_to: address of the target that references jar_library_specs, for
      error messages
    :param list jar_library_specs: string specs of JarLibrary targets. Note: this list should be returned
      by the caller's traversable_specs() implementation to make sure that the jar_dependency jars
      have been added to the build graph.
    :param BuildGraph build_graph: build graph instance used to search for specs
    :return: list of JarDependency instances represented by the library_specs
    """
    jar_deps = OrderedSet()
    for spec in jar_library_specs:
      if not isinstance(spec, string_types):
        raise JarLibrary.ExpectedAddressError(
          "{address}: expected imports to contain string addresses, got {found_class}."
          .format(address=relative_to.spec,
                  found_class=type(spec).__name__))

      lookup = Address.parse(spec, relative_to=relative_to.spec_path)
      target = build_graph.get_target(lookup)
      if not isinstance(target, JarLibrary):
        raise JarLibrary.WrongTargetTypeError(
          "{address}: expected {spec} to be jar_library target type, got {found_class}"
          .format(address=relative_to.spec,
                  spec=spec,
                  found_class=type(target).__name__))
      jar_deps.update(target.jar_dependencies)

    return list(jar_deps)
Example #22
  def get_artifacts_for_jar_library(self, jar_library, memo=None):
    """Collects IvyArtifact instances for the passed jar_library.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the library's jar_dependencies, but will NOT
    walk into its non-jar dependencies.

    :param jar_library: A JarLibrary to collect the transitive artifacts for.
    :param memo: see `traverse_dependency_graph`
    """
    artifacts = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in jar_library.jar_dependencies:
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev)
      valid_classifiers = jar.artifact_classifiers
      artifacts_for_jar = []
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        artifacts_for_jar.extend(
          artifact for artifact in self._artifacts_by_ref[module_ref.unversioned]
          if artifact.classifier in valid_classifiers
        )

      artifacts.update(artifacts_for_jar)
    return artifacts
Example #23
  def compute_classpath_entries(cls, targets, classpath_products, extra_classpath_tuples, confs):
    """Return the list of classpath entries for a classpath covering the passed targets.

    Filters and adds paths from extra_classpath_tuples to the end of the resulting list.

    :param targets: The targets to generate a classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param extra_classpath_tuples: Additional classpath entries as tuples of
      (string, ClasspathEntry).
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath entries as a list of path elements.
    :rtype: list of ClasspathEntry
    """
    classpath_iter = cls._classpath_iter(
      classpath_products.get_classpath_entries_for_targets(targets),
      confs=confs,
    )
    total_classpath = OrderedSet(classpath_iter)

    filtered_extra_classpath_iter = cls._filtered_classpath_by_confs_iter(
      extra_classpath_tuples,
      confs,
    )
    extra_classpath_iter = cls._entries_iter(filtered_extra_classpath_iter)
    total_classpath.update(extra_classpath_iter)
    return list(total_classpath)
Example #24
    def execute_codegen(self, target, target_workdir):
        sources_by_base = self._calculate_sources(target)
        sources = target.sources_relative_to_buildroot()

        bases = OrderedSet(sources_by_base.keys())
        bases.update(self._proto_path_imports([target]))

        gen_flag = "--java_out"

        gen = "{0}={1}".format(gen_flag, target_workdir)

        args = [self.protobuf_binary, gen]

        if self.plugins:
            for plugin in self.plugins:
                args.append("--{0}_out={1}".format(plugin, target_workdir))

        for base in bases:
            args.append("--proto_path={0}".format(base))

        args.extend(sources)

        # Tack on extra path entries. These can be used to find protoc plugins
        protoc_environ = os.environ.copy()
        if self._extra_paths:
            protoc_environ["PATH"] = os.pathsep.join(self._extra_paths + protoc_environ["PATH"].split(os.pathsep))

        # Note: The test_source_ordering integration test scrapes this output, so modify it with care.
        self.context.log.debug("Executing: {0}".format("\\\n  ".join(args)))
        with self.context.new_workunit(name="protoc", labels=[WorkUnitLabel.TOOL], cmd=" ".join(args)) as workunit:
            result = subprocess.call(
                args, env=protoc_environ, stdout=workunit.output("stdout"), stderr=workunit.output("stderr")
            )
            if result != 0:
                raise TaskError("{} ... exited non-zero ({})".format(self.protobuf_binary, result))
Example #25
  def _resolve_java_deps(self, target):
    key = self._CONFIG_SECTION_BY_COMPILER[target.compiler]

    deps = OrderedSet()
    for dep in self.context.config.getlist(key, 'javadeps'):
        deps.update(self.context.resolve(dep))
    return deps
Example #26
  def _detect_cycle(self, src, dest):
    """Given a src and a dest, each of which _might_ already exist in the graph, detect cycles.

    Return a path of Nodes that describe the cycle, or None.
    """
    path = OrderedSet()
    walked = set()
    def _walk(node):
      if node in path:
        return tuple(path) + (node,)
      if node in walked:
        return None
      path.add(node)
      walked.add(node)

      for dep in self.dependencies_of(node):
        found = _walk(dep)
        if found is not None:
          return found
      path.discard(node)
      return None

    # Initialize the path with src (since the edge from src->dest may not actually exist), and
    # then walk from the dest.
    path.update([src])
    return _walk(dest)
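The same DFS works over a plain adjacency mapping. A self-contained sketch, with dependencies_of replaced by a dict lookup and a list standing in for the OrderedSet path:

def detect_cycle(graph, src, dest):
  """Return a tuple of nodes describing a cycle reachable from dest, or None.

  graph maps node -> iterable of its dependencies. src seeds the path, as in
  _detect_cycle above, because the src -> dest edge may not exist yet.
  """
  path = []
  walked = set()

  def _walk(node):
    if node in path:
      return tuple(path) + (node,)
    if node in walked:
      return None
    path.append(node)
    walked.add(node)
    for dep in graph.get(node, ()):
      found = _walk(dep)
      if found is not None:
        return found
    path.pop()
    return None

  path.append(src)
  return _walk(dest)

graph = {'b': ['c'], 'c': ['a']}
print(detect_cycle(graph, 'a', 'b'))  # ('a', 'b', 'c', 'a')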
Example #27
  def checkstyle(self, targets, sources):
    runtime_classpaths = self.context.products.get_data('runtime_classpath')
    union_classpath = OrderedSet(self.tool_classpath('checkstyle'))
    for target in targets:
      runtime_classpath = runtime_classpaths.get_for_targets(target.closure(bfs=True))
      union_classpath.update(jar for conf, jar in runtime_classpath
                             if conf in self.get_options().confs)

    args = [
      '-c', self.get_options().configuration,
      '-f', 'plain'
    ]

    if self.get_options().properties:
      properties_file = os.path.join(self.workdir, 'checkstyle.properties')
      with safe_open(properties_file, 'w') as pf:
        for k, v in self.get_options().properties.items():
          pf.write('{key}={value}\n'.format(key=k, value=v))
      args.extend(['-p', properties_file])

    # We've hit known cases of checkstyle command lines being too long for the system so we guard
    # with Xargs since checkstyle does not accept, for example, @argfile style arguments.
    def call(xargs):
      return self.runjava(classpath=union_classpath, main=self._CHECKSTYLE_MAIN,
                          jvm_options=self.get_options().jvm_options,
                          args=args + xargs, workunit_name='checkstyle')
    checks = Xargs(call)

    return checks.execute(sources)
Example #28
  def scan_addresses(root_dir, base_path = None):
    """Parses all targets available in BUILD files under base_path and returns their addresses.  If no
    base_path is specified, root_dir is assumed to be the base_path"""

    addresses = OrderedSet()
    for buildfile in BuildFile.scan_buildfiles(root_dir, base_path):
      addresses.update(Target.get_all_addresses(buildfile))
    return addresses
Example #29
 def get_jar_paths_for_ivy_module(self, ivyinfo, ref):
   jar_paths = OrderedSet()
   module = ivyinfo.modules_by_ref[ref]
   jar_paths.update([a.path for a in module.artifacts])
   if self.transitive:
     for dep in ivyinfo.deps_by_caller.get(ref, []):
       jar_paths.update(self.get_jar_paths_for_ivy_module(ivyinfo, dep))
   return jar_paths
Example #30
 def resolve_deps(self, unresolved_deps):
   deps = OrderedSet()
   for dep in unresolved_deps:
     try:
       deps.update(self.context.resolve(dep))
     except AddressLookupError as e:
       raise self.DepLookupError('{message}\n  on dependency {dep}'.format(message=e, dep=dep))
   return deps
Example #31
class Build(Command):
  """Builds a specified target."""

  __command__ = 'build'

  def setup_parser(self, parser, args):
    parser.set_usage("\n"
                     "  %prog build (options) [spec] (build args)\n"
                     "  %prog build (options) [spec]... -- (build args)")
    parser.add_option("-t", "--timeout", dest="conn_timeout", type="int",
                      default=Config.load().getdefault('connection_timeout'),
                      help="Number of seconds to wait for http connections.")
    parser.add_option('-i', '--interpreter', dest='interpreter', default=None,
                      help='The interpreter requirement for this chroot.')
    parser.add_option('-v', '--verbose', dest='verbose', default=False, action='store_true',
                      help='Show verbose output.')
    parser.disable_interspersed_args()
    parser.epilog = ('Builds the specified Python target(s). Use ./pants goal for JVM and other '
                     'targets.')

  def __init__(self, run_tracker, root_dir, parser, argv):
    Command.__init__(self, run_tracker, root_dir, parser, argv)

    if not self.args:
      self.error("A spec argument is required")

    self.config = Config.load()
    self.interpreter_cache = PythonInterpreterCache(self.config, logger=self.debug)
    self.interpreter_cache.setup()
    interpreters = self.interpreter_cache.select_interpreter(
        list(self.interpreter_cache.matches([self.options.interpreter]
            if self.options.interpreter else [''])))
    if len(interpreters) != 1:
      self.error('Unable to detect suitable interpreter.')
    else:
      self.debug('Selected %s' % interpreters[0])
    self.interpreter = interpreters[0]

    try:
      specs_end = self.args.index('--')
      if len(self.args) > specs_end:
        self.build_args = self.args[specs_end+1:]
      else:
        self.build_args = []
    except ValueError:
      specs_end = 1
      self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[0:specs_end]:
      try:
        address = Address.parse(root_dir, spec)
      except:
        self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

      try:
        target = Target.get(address)
      except:
        self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

      if not target:
        self.error("Target %s does not exist" % address)
      self.targets.update(tgt for tgt in target.resolve() if tgt.is_concrete)

  def debug(self, message):
    if self.options.verbose:
      print(message, file=sys.stderr)

  def execute(self):
    print("Build operating on targets: %s" % self.targets)

    python_targets = OrderedSet()
    for target in self.targets:
      if target.is_python:
        python_targets.add(target)
      else:
        self.error("Cannot build target %s" % target)

    if python_targets:
      status = self._python_build(python_targets)
    else:
      status = -1

    return status

  def _python_build(self, targets):
    try:
      executor = PythonBuilder(self.run_tracker, self.root_dir)
      return executor.build(
        targets,
        self.build_args,
        interpreter=self.interpreter,
        conn_timeout=self.options.conn_timeout)
    except:
      self.error("Problem executing PythonBuilder for targets %s: %s" % (targets,
                                                                         traceback.format_exc()))
Example #32
  def _run_tests(self, tests_to_targets):
    if self._coverage:
      extra_jvm_options = self._coverage.extra_jvm_options
      classpath_prepend = self._coverage.classpath_prepend
      classpath_append = self._coverage.classpath_append
    else:
      extra_jvm_options = []
      classpath_prepend = ()
      classpath_append = ()

    tests_by_properties = self._tests_by_properties(
      tests_to_targets,
      self._infer_workdir,
      lambda target: target.test_platform,
      lambda target: target.payload.extra_jvm_options,
      lambda target: target.payload.extra_env_vars,
    )

    # the below will be None if not set, and we'll default back to runtime_classpath
    classpath_product = self.context.products.get_data('instrument_classpath')

    result = 0
    for (workdir, platform, target_jvm_options, target_env_vars), tests in tests_by_properties.items():
      for batch in self._partition(tests):
        # Batches of test classes will likely exist within the same targets: dedupe them.
        relevant_targets = set(map(tests_to_targets.get, batch))
        complete_classpath = OrderedSet()
        complete_classpath.update(classpath_prepend)
        complete_classpath.update(self.tool_classpath('junit'))
        complete_classpath.update(self.classpath(relevant_targets,
                                                 classpath_product=classpath_product))
        complete_classpath.update(classpath_append)
        distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)
        with binary_util.safe_args(batch, self.get_options()) as batch_tests:
          self.context.log.debug('CWD = {}'.format(workdir))
          self.context.log.debug('platform = {}'.format(platform))
          with environment_as(**dict(target_env_vars)):
            result += abs(self._spawn_and_wait(
              executor=SubprocessExecutor(distribution),
              distribution=distribution,
              classpath=complete_classpath,
              main=JUnitRun._MAIN,
              jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
              args=self._args + batch_tests + [u'-xmlreport'],
              workunit_factory=self.context.new_workunit,
              workunit_name='run',
              workunit_labels=[WorkUnitLabel.TEST],
              cwd=workdir,
              synthetic_jar_dir=self.workdir,
              create_synthetic_jar=self.synthetic_classpath,
            ))

          if result != 0 and self._fail_fast:
            break

    if result != 0:
      failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
      failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
      error_message_lines = []
      if self._failure_summary:
        for target in failed_targets:
          error_message_lines.append('\n{0}{1}'.format(' '*4, target.address.spec))
          for test in sorted(failed_targets_and_tests[target]):
            error_message_lines.append('{0}{1}'.format(' '*8, test))
      error_message_lines.append(
        '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
          .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
                  targets=pluralize(len(failed_targets), 'target'))
      )
      raise TestFailedTaskError('\n'.join(error_message_lines), failed_targets=list(failed_targets))
Example #33
 def calculate_tests(self, targets):
   tests = OrderedSet()
   for target in targets:
     if is_scala(target) and is_test(target):
       tests.update(os.path.join(target.target_base, test) for test in target.sources)
   return tests
Example #34
 def sections(self) -> List[str]:
   ret = OrderedSet()
   for cfg in self._configs:
     ret.update(cfg.sections())
   return list(ret)
Example #35
class InternalTarget(Target):
    """A baseclass for targets that support an optional dependency set."""
    class CycleException(Exception):
        """Thrown when a circular dependency is detected."""
        def __init__(self, precedents, cycle):
            Exception.__init__(
                self, 'Cycle detected along path:\n\t%s' % (' ->\n\t'.join(
                    str(target.address)
                    for target in list(precedents) + [cycle])))

    @classmethod
    def check_cycles(cls, internal_target):
        """Validates the given InternalTarget has no circular dependencies.  Raises CycleException if
    it does."""
        dep_stack = OrderedSet()  # The DFS stack.
        visited = set()  # Prevent expensive re-checking of subgraphs.

        def descend(internal_dep):
            if internal_dep in dep_stack:
                raise InternalTarget.CycleException(dep_stack, internal_dep)
            if hasattr(internal_dep, 'internal_dependencies'):
                dep_stack.add(internal_dep)
                for dep in internal_dep.internal_dependencies:
                    if dep not in visited:
                        descend(dep)
                        visited.add(dep)
                dep_stack.remove(internal_dep)

        descend(internal_target)

    @classmethod
    def sort_targets(cls, internal_targets):
        """Returns a list of targets that internal_targets depend on sorted from most dependent to
    least."""

        roots = OrderedSet()
        inverted_deps = collections.defaultdict(
            OrderedSet)  # target -> dependent targets
        visited = set()

        def invert(target):
            if target not in visited:
                visited.add(target)
                if getattr(target, 'internal_dependencies', None):
                    for internal_dependency in target.internal_dependencies:
                        if isinstance(internal_dependency, InternalTarget):
                            inverted_deps[internal_dependency].add(target)
                            invert(internal_dependency)
                else:
                    roots.add(target)

        for internal_target in internal_targets:
            invert(internal_target)

        sorted_targets = []  # renamed from `sorted` to avoid shadowing the builtin
        visited.clear()

        def topological_sort(target):
            if target not in visited:
                visited.add(target)
                if target in inverted_deps:
                    for dep in inverted_deps[target]:
                        topological_sort(dep)
                sorted_targets.append(target)

        for root in roots:
            topological_sort(root)

        return sorted_targets

    @classmethod
    def coalesce_targets(cls, internal_targets, discriminator):
        """Returns a list of targets internal_targets depend on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        sorted_targets = InternalTarget.sort_targets(internal_targets)

        # can do no better for any of these:
        # []
        # [a]
        # [a,b]
        if len(sorted_targets) <= 2:
            return sorted_targets

        # For these, we'd like to coalesce if possible, like:
        # [a,b,a,c,a,c] -> [a,a,a,b,c,c]
        # We adopt a quadratic worst-case solution: when we find a type-change edge, we scan
        # forward for the opposite edge and then try to swap dependency pairs to move the type
        # back left to its grouping.  If the leftwards migration fails due to a dependency
        # constraint, we just stop and move on, leaving "type islands".
        current_type = None

        # main scan left to right no backtracking
        for i in range(len(sorted_targets) - 1):
            current_target = sorted_targets[i]
            if current_type != discriminator(current_target):
                scanned_back = False

                # scan ahead for next type match
                for j in range(i + 1, len(sorted_targets)):
                    look_ahead_target = sorted_targets[j]
                    if current_type == discriminator(look_ahead_target):
                        scanned_back = True

                        # swap this guy as far back as we can
                        for k in range(j, i, -1):
                            previous_target = sorted_targets[k - 1]
                            mismatching_types = current_type != discriminator(
                                previous_target)
                            not_a_dependency = look_ahead_target not in previous_target.internal_dependencies
                            if mismatching_types and not_a_dependency:
                                sorted_targets[k] = sorted_targets[k - 1]
                                sorted_targets[k - 1] = look_ahead_target
                            else:
                                break  # out of k

                        break  # out of j

                if not scanned_back:  # done with coalescing the current type, move on to next
                    current_type = discriminator(current_target)

        return sorted_targets

    def sort(self):
        """Returns a list of targets this target depends on sorted from most dependent to least."""

        return InternalTarget.sort_targets([self])

    def coalesce(self, discriminator):
        """Returns a list of targets this target depends on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        return InternalTarget.coalesce_targets([self], discriminator)

    def __init__(self, name, dependencies, is_meta):
        Target.__init__(self, name, is_meta)

        self.add_label('internal')
        self.dependencies = OrderedSet()
        self.internal_dependencies = OrderedSet()
        self.jar_dependencies = OrderedSet()

        # TODO(John Sirois): if meta targets were truly built outside parse contexts - we could instead
        # just use the more general check: if parsing: delay(doit) else: doit()
        # Fix how target _ids are built / addresses to not require a BUILD file - ie: support anonymous,
        # non-addressable targets - which is what meta-targets really are once created.
        if is_meta:
            # Meta targets are built outside any parse context - so update dependencies immediately
            self.update_dependencies(dependencies)
        else:
            # Defer dependency resolution after parsing the current BUILD file to allow for forward
            # references
            self._post_construct(self.update_dependencies, dependencies)

    def update_dependencies(self, dependencies):
        if dependencies:
            for dependency in dependencies:
                for resolved_dependency in dependency.resolve():
                    self.dependencies.add(resolved_dependency)
                    if isinstance(resolved_dependency, InternalTarget):
                        self.internal_dependencies.add(resolved_dependency)
                    if hasattr(resolved_dependency, '_as_jar_dependencies'):
                        self.jar_dependencies.update(
                            resolved_dependency._as_jar_dependencies())

    def replace_dependency(self, dependency, replacement):
        self.dependencies.discard(dependency)
        self.internal_dependencies.discard(dependency)
        self.jar_dependencies.discard(dependency)
        self.update_dependencies([replacement])

    def _walk(self, walked, work, predicate=None):
        Target._walk(self, walked, work, predicate)
        for dep in self.dependencies:
            if isinstance(dep, Target) and not dep in walked:
                walked.add(dep)
                if not predicate or predicate(dep):
                    additional_targets = work(dep)
                    dep._walk(walked, work, predicate)
                    if additional_targets:
                        for additional_target in additional_targets:
                            additional_target._walk(walked, work, predicate)
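sort_targets above inverts the dependency edges (dependency -> dependents), collects the dependency-free roots, and then topologically sorts from those roots so the most-dependent targets come first. The same scheme on a plain dict, as a sketch:

import collections

def sort_most_dependent_first(deps):
  """deps maps target -> list of its direct dependencies."""
  inverted = collections.defaultdict(list)  # dependency -> dependents
  roots = []
  for target, target_deps in deps.items():
    if target_deps:
      for dep in target_deps:
        inverted[dep].append(target)
    else:
      roots.append(target)

  ordered = []
  visited = set()

  def visit(target):
    if target in visited:
      return
    visited.add(target)
    # Emit all dependents before the target itself.
    for dependent in inverted.get(target, ()):
      visit(dependent)
    ordered.append(target)

  for root in roots:
    visit(root)
  return ordered

print(sort_most_dependent_first({'app': ['lib'], 'lib': ['base'], 'base': []}))
# ['app', 'lib', 'base'] -- most dependent first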
Example #36
class Context(object):
    """Contains the context for a single run of pants.

  Goal implementations can access configuration data from pants.ini and any flags they have exposed
  here as well as information about the targets involved in the run.

  Advanced uses of the context include adding new targets to it for upstream or downstream goals to
  operate on and mapping of products a goal creates to the targets the products are associated with.
  """
    class Log(object):
        def debug(self, msg):
            pass

        def info(self, msg):
            pass

        def warn(self, msg):
            pass

    def __init__(self, config, options, target_roots, lock=None, log=None):
        self._config = config
        self._options = options
        self._lock = lock or Lock.unlocked()
        self._log = log or Context.Log()
        self._state = {}
        self._products = Products()

        self.replace_targets(target_roots)

    @property
    def config(self):
        """Returns a Config object containing the configuration data found in pants.ini."""
        return self._config

    @property
    def options(self):
        """Returns the command line options parsed at startup."""
        return self._options

    @property
    def lock(self):
        """Returns the global pants run lock so a goal can release it if needed."""
        return self._lock

    @property
    def log(self):
        """Returns the preferred logger for goals to use."""
        return self._log

    @property
    def products(self):
        """Returns the Products manager for the current run."""
        return self._products

    @property
    def target_roots(self):
        """Returns the targets specified on the command line.

    This set is strictly a subset of all targets in play for the run as returned by self.targets().
    Note that for a command line invocation that uses wildcard selectors : or ::, the targets
    globbed by the wildcards are considered to be target roots.
    """
        return self._target_roots

    def __str__(self):
        return 'Context(id:%s, state:%s, targets:%s)' % (self.id, self.state,
                                                         self.targets())

    def replace_targets(self, target_roots):
        """Replaces all targets in the context with the given roots and their transitive
    dependencies.
    """
        self._target_roots = target_roots
        self._targets = OrderedSet()
        for target in target_roots:
            self.add_target(target)
        self.id = Target.identify(self._targets)

    def add_target(self, target):
        """Adds a target and its transitive dependencies to the run context.

    The target is not added to the target roots.
    """
        def add_targets(tgt):
            self._targets.update(tgt.resolve())

        target.walk(add_targets)

    def add_new_target(self, target_base, target_type, *args, **kwargs):
        """Creates a new target, adds it to the context and returns it.

    This method ensures the target resolves files against the given target_base, creating the
    directory if needed and registering a source root.
    """
        target = self._create_new_target(target_base, target_type, *args,
                                         **kwargs)
        self.add_target(target)
        return target

    def _create_new_target(self, target_base, target_type, *args, **kwargs):
        if not os.path.exists(target_base):
            os.makedirs(target_base)
        SourceRoot.register(target_base, target_type)
        with ParseContext.temp(target_base):
            return target_type(*args, **kwargs)

    def remove_target(self, target):
        """Removes the given Target object from the context completely if present."""
        if target in self.target_roots:
            self.target_roots.remove(target)
        self._targets.discard(target)

    def targets(self, predicate=None):
        """Selects targets in-play in this run from the target roots and their transitive dependencies.

    If specified, the predicate will be used to narrow the scope of targets returned.
    """
        return filter(predicate, self._targets)

    def dependants(self, on_predicate=None, from_predicate=None):
        """Returns  a map from targets that satisfy the from_predicate to targets they depend on that
      satisfy the on_predicate.
    """
        core = set(self.targets(on_predicate))
        dependees = defaultdict(set)
        for target in self.targets(from_predicate):
            if hasattr(target, 'dependencies'):
                for dependency in target.dependencies:
                    if dependency in core:
                        dependees[target].add(dependency)
        return dependees

    def resolve(self, spec):
        """Returns an iterator over the target(s) the given address points to."""
        with ParseContext.temp():
            return Pants(spec).resolve()

    @contextmanager
    def state(self, key, default=None):
        value = self._state.get(key, default)
        yield value
        self._state[key] = value
Example #37
 def sections(self):
     ret = OrderedSet()
     for cfg in self.configs:
         ret.update(cfg.sections())
     return ret
Example #38
class BuildConfiguration(object):
    """Stores the types and helper functions exposed to BUILD files."""
    class ParseState(
            namedtuple('ParseState', ['parse_context', 'parse_globals'])):
        @property
        def objects(self):
            return self.parse_context._storage.objects

    def __init__(self):
        self._target_by_alias = {}
        self._target_macro_factory_by_alias = {}
        self._exposed_object_by_alias = {}
        self._exposed_context_aware_object_factory_by_alias = {}
        self._optionables = OrderedSet()
        self._rules = OrderedSet()

    def registered_aliases(self):
        """Return the registered aliases exposed in BUILD files.

    These returned aliases aren't so useful for actually parsing BUILD files.
    They are useful for generating things like http://pantsbuild.github.io/build_dictionary.html.

    :returns: A new BuildFileAliases instance containing this BuildConfiguration's registered alias
              mappings.
    :rtype: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    """
        target_factories_by_alias = self._target_by_alias.copy()
        target_factories_by_alias.update(self._target_macro_factory_by_alias)
        return BuildFileAliases(
            targets=target_factories_by_alias,
            objects=self._exposed_object_by_alias.copy(),
            context_aware_object_factories=(
                self._exposed_context_aware_object_factory_by_alias.copy()))

    def register_aliases(self, aliases):
        """Registers the given aliases to be exposed in parsed BUILD files.

    :param aliases: The BuildFileAliases to register.
    :type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    """
        if not isinstance(aliases, BuildFileAliases):
            raise TypeError(
                'The aliases must be a BuildFileAliases, given {}'.format(
                    aliases))

        for alias, target_type in aliases.target_types.items():
            self._register_target_alias(alias, target_type)

        for alias, target_macro_factory in aliases.target_macro_factories.items():
            self._register_target_macro_factory_alias(alias,
                                                      target_macro_factory)

        for alias, obj in aliases.objects.items():
            self._register_exposed_object(alias, obj)

        for alias, context_aware_object_factory in aliases.context_aware_object_factories.items():
            self._register_exposed_context_aware_object_factory(
                alias, context_aware_object_factory)

    # TODO(John Sirois): Warn on alias override across all aliases since they share a global
    # namespace in BUILD files.
    # See: https://github.com/pantsbuild/pants/issues/2151
    def _register_target_alias(self, alias, target_type):
        if alias in self._target_by_alias:
            logger.debug(
                'Target alias {} has already been registered. Overwriting!'.
                format(alias))

        self._target_by_alias[alias] = target_type
        self.register_optionables(target_type.subsystems())

    def _register_target_macro_factory_alias(self, alias,
                                             target_macro_factory):
        if alias in self._target_macro_factory_by_alias:
            logger.debug(
                'TargetMacro alias {} has already been registered. Overwriting!'
                .format(alias))

        self._target_macro_factory_by_alias[alias] = target_macro_factory
        for target_type in target_macro_factory.target_types:
            self.register_optionables(target_type.subsystems())

    def _register_exposed_object(self, alias, obj):
        if alias in self._exposed_object_by_alias:
            logger.debug(
                'Object alias {} has already been registered. Overwriting!'.
                format(alias))

        self._exposed_object_by_alias[alias] = obj
        # obj doesn't implement any common base class, so we have to test for this attr.
        if hasattr(obj, 'subsystems'):
            self.register_optionables(obj.subsystems())

    def _register_exposed_context_aware_object_factory(
            self, alias, context_aware_object_factory):
        if alias in self._exposed_context_aware_object_factory_by_alias:
            logger.debug(
                'This context aware object factory alias {} has already been registered. '
                'Overwriting!'.format(alias))

        self._exposed_context_aware_object_factory_by_alias[alias] = context_aware_object_factory

    @deprecated('1.15.0.dev1', hint_message='Use self.register_optionables().')
    def register_subsystems(self, subsystems):
        return self.register_optionables(subsystems)

    def register_optionables(self, optionables):
        """Registers the given subsystem types.

    :param optionables: The Optionable types to register.
    :type optionables: :class:`collections.Iterable` containing
                       :class:`pants.option.optionable.Optionable` subclasses.
    """
        if not isinstance(optionables, Iterable):
            raise TypeError(
                'The optionables must be an iterable, given {}'.format(
                    optionables))
        optionables = tuple(optionables)
        if not optionables:
            return

        invalid_optionables = [
            s for s in optionables
            if not isinstance(s, type) or not issubclass(s, Optionable)
        ]
        if invalid_optionables:
            raise TypeError(
                'The following items from the given optionables are not Optionable '
                'subclasses:\n\t{}'.format('\n\t'.join(
                    str(i) for i in invalid_optionables)))

        self._optionables.update(optionables)

    def optionables(self):
        """Returns the registered Optionable types.

    :rtype: set
    """
        return self._optionables

    @deprecated('1.15.0.dev1', hint_message='Use self.optionables().')
    def subsystems(self):
        """Returns the registered Subsystem types.

    :rtype: set
    """
        return {o for o in self._optionables if issubclass(o, Subsystem)}

    def register_rules(self, rules):
        """Registers the given rules.

    :param rules: The rules to register.
    :type rules: :class:`collections.Iterable` containing
                 :class:`pants.engine.rules.Rule` instances.
    """
        if not isinstance(rules, Iterable):
            raise TypeError(
                'The rules must be an iterable, given {!r}'.format(rules))

        # "Index" the rules to normalize them and expand their dependencies.
        indexed_rules = RuleIndex.create(rules).normalized_rules()

        # Store the rules and record their dependency Optionables.
        self._rules.update(indexed_rules)
        dependency_optionables = {
            do
            for rule in indexed_rules for do in rule.dependency_optionables
            if rule.dependency_optionables
        }
        self.register_optionables(dependency_optionables)

    def rules(self):
        """Returns the registered rules.

    :rtype: list
    """
        return list(self._rules)

    @memoized_method
    def _get_addressable_factory(self, target_type, alias):
        return TargetAddressable.factory(target_type=target_type, alias=alias)

    def initialize_parse_state(self, build_file):
        """Creates a fresh parse state for the given build file.

    :param build_file: The BUILD file to set up a new ParseState for.
    :type build_file: :class:`pants.base.build_file.BuildFile`
    :returns: A fresh ParseState for parsing the given `build_file` with.
    :rtype: :class:`BuildConfiguration.ParseState`
    """
        # TODO(John Sirois): Introduce a factory method to seal the BuildConfiguration and add a check
        # there that all anonymous types are covered by context aware object factories that are
        # Macro instances.  Without this, we could have non-Macro context aware object factories being
        # asked to be a BuildFileTargetFactory when they are not (in SourceRoot registration context).
        # See: https://github.com/pantsbuild/pants/issues/2125
        type_aliases = self._exposed_object_by_alias.copy()
        parse_context = ParseContext(rel_path=build_file.spec_path,
                                     type_aliases=type_aliases)

        def create_call_proxy(tgt_type, tgt_alias=None):
            def registration_callback(address, addressable):
                parse_context._storage.add(addressable,
                                           name=address.target_name)

            addressable_factory = self._get_addressable_factory(
                tgt_type, tgt_alias)
            return AddressableCallProxy(
                addressable_factory=addressable_factory,
                build_file=build_file,
                registration_callback=registration_callback)

        # Expose all aliased Target types.
        for alias, target_type in self._target_by_alias.items():
            proxy = create_call_proxy(target_type, alias)
            type_aliases[alias] = proxy

        # Expose aliases for exposed objects and targets in the BUILD file.
        parse_globals = type_aliases.copy()

        # Now it's safe to add mappings from both the directly exposed and macro-created target
        # types to their call proxies, for context aware object factories and macros to use to
        # manufacture targets by type instead of by alias.
        for alias, target_type in self._target_by_alias.items():
            proxy = type_aliases[alias]
            type_aliases[target_type] = proxy

        for target_macro_factory in self._target_macro_factory_by_alias.values():
            for target_type in target_macro_factory.target_types:
                proxy = create_call_proxy(target_type)
                type_aliases[target_type] = proxy

        for alias, object_factory in self._exposed_context_aware_object_factory_by_alias.items():
            parse_globals[alias] = object_factory(parse_context)

        for alias, target_macro_factory in self._target_macro_factory_by_alias.items():
            parse_globals[alias] = target_macro_factory.target_macro(
                parse_context)

        return self.ParseState(parse_context, parse_globals)
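
The guard at the top of register_optionables above (reject non-iterables, then reject members that are not Optionable subclasses) is a reusable defensive pattern. Below is a minimal, self-contained sketch of the same pattern; the Optionable stand-in and register function are illustrative, not pants APIs, and collections.abc is used so the sketch runs on current Python.

from collections.abc import Iterable

class Optionable(object):
    """Stand-in for pants.option.optionable.Optionable."""

def register(optionables):
    # Reject non-iterables outright; a bare type passed by mistake fails loudly here.
    if not isinstance(optionables, Iterable):
        raise TypeError('The optionables must be an iterable, given {!r}'.format(optionables))
    optionables = tuple(optionables)
    # Reject members that are not Optionable subclasses.
    invalid = [s for s in optionables
               if not isinstance(s, type) or not issubclass(s, Optionable)]
    if invalid:
        raise TypeError('Not Optionable subclasses: {}'.format(invalid))
    return set(optionables)

class MySubsystem(Optionable):
    pass

assert register([MySubsystem]) == {MySubsystem}
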
Beispiel #39
0
 def normalized_rules(self):
     rules = OrderedSet(rule for ruleset in self.rules.values()
                        for rule in ruleset)
     rules.update(self.roots)
     return self.NormalizedRules(rules, self.union_rules)
Beispiel #40
0
    def configure(self):
        """Configures this project's source sets returning the full set of targets the project is
    comprised of.  The full set can be larger than the initial set of targets when any of the
    initial targets only has partial ownership of its source set's directories."""

        # TODO(John Sirois): much waste lies here, revisit structuring for more readable and efficient
        # construction of source sets and excludes ... and add a test!

        analyzed = OrderedSet()
        targeted = set()

        def source_target(target):
            return has_sources(target) and not target.is_codegen

        def configure_source_sets(relative_base, sources, is_test):
            absolute_base = os.path.join(self.root_dir, relative_base)
            paths = set([os.path.dirname(source) for source in sources])
            for path in paths:
                absolute_path = os.path.join(absolute_base, path)
                if absolute_path not in targeted:
                    targeted.add(absolute_path)
                    self.sources.append(
                        SourceSet(self.root_dir, relative_base, path, is_test))

        def find_source_basedirs(target):
            dirs = set()
            if source_target(target):
                absolute_base = os.path.join(self.root_dir, target.target_base)
                dirs.update([
                    os.path.join(absolute_base, os.path.dirname(source))
                    for source in target.sources
                ])
            return dirs

        def configure_target(target):
            if target not in analyzed:
                analyzed.add(target)

                self.has_scala = self.has_scala or is_scala(target)

                if isinstance(target, (JavaLibrary, ScalaLibrary)):
                    # TODO(John Sirois): this does not handle test resources, make test resources 1st class
                    # in ant build and punch this through to pants model
                    resources = set()
                    if target.resources:
                        resources.update(target.resources)
                    if target.binary_resources:
                        resources.update(target.binary_resources)
                    if resources:
                        self.resource_extensions.update(
                            Project.extract_resource_extensions(resources))
                        configure_source_sets(
                            ExportableJvmLibrary.RESOURCES_BASE_DIR,
                            resources,
                            is_test=False)

                if target.sources:
                    test = is_test(target)
                    self.has_tests = self.has_tests or test
                    configure_source_sets(target.target_base,
                                          target.sources,
                                          is_test=test)

                # Other BUILD files may specify sources in the same directory as this target.  Those
                # BUILD files might be in parent directories (globs('a/b/*.java')) or even child
                # directories if this target globs children as well.  Gather all these candidate
                # BUILD files to test for sources they own that live in the directories this
                # target's sources live in.
                target_dirset = find_source_basedirs(target)
                candidates = Target.get_all_addresses(target.address.buildfile)
                for ancestor in target.address.buildfile.ancestors():
                    candidates.update(Target.get_all_addresses(ancestor))
                for sibling in target.address.buildfile.siblings():
                    candidates.update(Target.get_all_addresses(sibling))
                for descendant in target.address.buildfile.descendants():
                    candidates.update(Target.get_all_addresses(descendant))

                def is_sibling(target):
                    return source_target(
                        target) and target_dirset.intersection(
                            find_source_basedirs(target))

                return filter(
                    is_sibling,
                    [Target.get(a) for a in candidates if a != target.address])

        for target in self.targets:
            target.walk(configure_target, predicate=source_target)

        # We need to figure out excludes. In doing so there are 2 cases to consider:
        # 1.) targets that depend on A only should lead to an exclude of B:
        # A/BUILD
        # A/B/BUILD
        #
        # 2.) targets that depend on A and C should not lead to an exclude of B (that would wipe
        # out C):
        # A/BUILD
        # A/B
        # A/B/C/BUILD
        #
        # One approach: build a set of all paths and parent paths containing BUILDs our targets
        # depend on - these are unexcludable.

        unexcludable_paths = set()
        for source_set in self.sources:
            parent = os.path.join(self.root_dir, source_set.source_base,
                                  source_set.path)
            while True:
                unexcludable_paths.add(parent)
                parent, _ = os.path.split(parent)
                # no need to add the repo root or above, all source paths and extra paths are children
                if parent == self.root_dir:
                    break

        for source_set in self.sources:
            paths = set()
            source_base = os.path.join(self.root_dir, source_set.source_base)
            for root, dirs, _ in os.walk(
                    os.path.join(source_base, source_set.path)):
                if dirs:
                    paths.update(os.path.join(root, d) for d in dirs)
            unused_children = paths - targeted
            if unused_children:
                for child in unused_children:
                    if child not in unexcludable_paths:
                        source_set.excludes.append(
                            os.path.relpath(child, source_base))

        targets = OrderedSet()
        for target in self.targets:
            target.walk(lambda target: targets.add(target), source_target)
        targets.update(analyzed - targets)
        return targets
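
The unexcludable-paths computation in configure above walks each source set's directory chain up to the repo root. A self-contained sketch of that walk, with an illustrative root and source path:

import os

def unexcludable(root_dir, source_path):
    # Collect source_path and every parent directory up to (but excluding) root_dir.
    paths = set()
    parent = os.path.join(root_dir, source_path)
    while True:
        paths.add(parent)
        parent, _ = os.path.split(parent)
        if parent == root_dir:
            break
    return paths

assert unexcludable('/repo', 'src/java/com/app') == {
    '/repo/src', '/repo/src/java', '/repo/src/java/com', '/repo/src/java/com/app'}
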
Beispiel #41
0
  def console_output(self, _):
    buildfiles = OrderedSet()
    address_mapper = self.context.address_mapper
    if self._dependees_types:
      base_paths = OrderedSet()
      for dependees_type in self._dependees_types:
        target_types = self.target_types_for_alias(dependees_type)
        # Try to find the SourceRoots for the given input type alias
        for target_type in target_types:
          try:
            roots = SourceRoot.roots(target_type)
            base_paths.update(roots)
          except KeyError:
            pass

      # TODO(John Sirois): BUG: This should not cause a failure, it should just force a slower full
      # scan.
      # TODO(John Sirois): BUG: The --type argument only limited the scan bases, it does not limit
      # the types of targets found under those bases, ie: we may have just limited our scan to
      # roots containing java_library, but those same roots likely also contain jvm_binary targets
      # that we do not wish to have in the results.  So the --type filtering needs to apply to the
      # final dependees_by_target map as well below.
      if not base_paths:
        raise TaskError(dedent("""\
                        No SourceRoot set for any of these target types: {}.
                        Please define a source root in BUILD file as:
                          source_root('<src-folder>', {})
                        """.format(' '.join(self._dependees_types),
                                   ', '.join(self._dependees_types))).strip())
      for base_path in base_paths:
        scanned = address_mapper.scan_buildfiles(get_buildroot(),
                                                 os.path.join(get_buildroot(), base_path),
                                                 spec_excludes=self._spec_excludes)
        buildfiles.update(scanned)
    else:
      buildfiles = address_mapper.scan_buildfiles(get_buildroot(),
                                                  spec_excludes=self._spec_excludes)

    build_graph = self.context.build_graph
    build_file_parser = self.context.build_file_parser

    dependees_by_target = defaultdict(set)
    for build_file in buildfiles:
      address_map = build_file_parser.parse_build_file(build_file)
      for address in address_map.keys():
        build_graph.inject_address_closure(address)
      for address in address_map.keys():
        target = build_graph.get_target(address)
        # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
        # user vs. targets created by pants at runtime.
        target = self.get_concrete_target(target)
        for dependency in target.dependencies:
          dependency = self.get_concrete_target(dependency)
          dependees_by_target[dependency].add(target)

    roots = set(self.context.target_roots)
    if self._closed:
      for root in roots:
        yield root.address.spec

    for dependant in self.get_dependants(dependees_by_target, roots):
      yield dependant.address.spec
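
The heart of the task above is inverting dependency edges into dependees_by_target. A minimal sketch of that inversion over a toy graph of string targets:

from collections import defaultdict

# target -> its direct dependencies
dependencies = {
    'app': ['lib', 'util'],
    'lib': ['util'],
}

# Invert: dependency -> the set of targets that depend on it.
dependees_by_target = defaultdict(set)
for target, deps in dependencies.items():
    for dependency in deps:
        dependees_by_target[dependency].add(target)

assert dependees_by_target['util'] == {'app', 'lib'}
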
Beispiel #42
0
    def _run_tests(self, test_registry, output_dir, coverage=None):
        if coverage:
            extra_jvm_options = coverage.extra_jvm_options
            classpath_prepend = coverage.classpath_prepend
            classpath_append = coverage.classpath_append
        else:
            extra_jvm_options = []
            classpath_prepend = ()
            classpath_append = ()

        tests_by_properties = test_registry.index(
            lambda tgt: tgt.cwd if tgt.cwd is not None else self._working_dir,
            lambda tgt: tgt.test_platform,
            lambda tgt: tgt.payload.extra_jvm_options,
            lambda tgt: tgt.payload.extra_env_vars,
            lambda tgt: tgt.concurrency,
            lambda tgt: tgt.threads)

        # The product below will be None if not set, and we'll default back to runtime_classpath.
        classpath_product = self.context.products.get_data(
            'instrument_classpath')

        result = 0
        for properties, tests in tests_by_properties.items():
            (workdir, platform, target_jvm_options, target_env_vars,
             concurrency, threads) = properties
            for batch in self._partition(tests):
                # Batches of test classes will likely exist within the same targets: dedupe them.
                relevant_targets = {
                    test_registry.get_owning_target(t)
                    for t in batch
                }
                complete_classpath = OrderedSet()
                complete_classpath.update(classpath_prepend)
                complete_classpath.update(
                    JUnit.global_instance().runner_classpath(self.context))
                complete_classpath.update(
                    self.classpath(relevant_targets,
                                   classpath_product=classpath_product))
                complete_classpath.update(classpath_append)
                distribution = JvmPlatform.preferred_jvm_distribution(
                    [platform], self._strict_jvm_version)

                # Override cmdline args with values from junit_test() target that specify concurrency:
                args = self._args(output_dir) + [u'-xmlreport']

                if concurrency is not None:
                    args = remove_arg(args, '-default-parallel')
                    if concurrency == JUnitTests.CONCURRENCY_SERIAL:
                        args = ensure_arg(args,
                                          '-default-concurrency',
                                          param='SERIAL')
                    elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
                        args = ensure_arg(args,
                                          '-default-concurrency',
                                          param='PARALLEL_CLASSES')
                    elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
                        args = ensure_arg(args,
                                          '-default-concurrency',
                                          param='PARALLEL_METHODS')
                    elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
                        args = ensure_arg(args,
                                          '-default-concurrency',
                                          param='PARALLEL_CLASSES_AND_METHODS')

                if threads is not None:
                    args = remove_arg(args,
                                      '-parallel-threads',
                                      has_param=True)
                    args += ['-parallel-threads', str(threads)]

                batch_test_specs = [test.render_test_spec() for test in batch]
                with argfile.safe_args(batch_test_specs,
                                       self.get_options()) as batch_tests:
                    self.context.log.debug('CWD = {}'.format(workdir))
                    self.context.log.debug('platform = {}'.format(platform))
                    with environment_as(**dict(target_env_vars)):
                        result += abs(
                            self._spawn_and_wait(
                                executor=SubprocessExecutor(distribution),
                                distribution=distribution,
                                classpath=complete_classpath,
                                main=JUnit.RUNNER_MAIN,
                                jvm_options=self.jvm_options +
                                extra_jvm_options + list(target_jvm_options),
                                args=args + batch_tests,
                                workunit_factory=self.context.new_workunit,
                                workunit_name='run',
                                workunit_labels=[WorkUnitLabel.TEST],
                                cwd=workdir,
                                synthetic_jar_dir=output_dir,
                                create_synthetic_jar=self.synthetic_classpath,
                            ))

                    if result != 0 and self._fail_fast:
                        break

        if result != 0:

            def error_handler(parse_error):
                # Just log and move on since the result is only used to characterize failures, and raising
                # an error here would just distract from the underlying test failures.
                self.context.log.error(
                    'Error parsing test result file {path}: {cause}'.format(
                        path=parse_error.junit_xml_path,
                        cause=parse_error.cause))

            target_to_failed_test = parse_failed_targets(
                test_registry, output_dir, error_handler)
            failed_targets = sorted(target_to_failed_test,
                                    key=lambda t: t.address.spec)
            error_message_lines = []
            if self._failure_summary:
                for target in failed_targets:
                    error_message_lines.append('\n{indent}{address}'.format(
                        indent=' ' * 4, address=target.address.spec))
                    for test in sorted(target_to_failed_test[target]):
                        error_message_lines.append(
                            '{indent}{classname}#{methodname}'.format(
                                indent=' ' * 8,
                                classname=test.classname,
                                methodname=test.methodname))
            error_message_lines.append(
                '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
                .format(main=JUnit.RUNNER_MAIN,
                        code=result,
                        failed=len(failed_targets),
                        targets=pluralize(len(failed_targets), 'target')))
            raise TestFailedTaskError('\n'.join(error_message_lines),
                                      failed_targets=list(failed_targets))
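
The remove_arg/ensure_arg calls above rewrite the runner's argument list so that per-target concurrency settings from junit_test() win over command-line defaults. The real implementations live in pants; the stand-ins below are simplified sketches of that behavior, not the actual functions:

def remove_arg(args, arg, has_param=False):
    # Drop `arg` (and its parameter, if it takes one) from the argument list.
    args = list(args)
    if arg in args:
        i = args.index(arg)
        del args[i:i + (2 if has_param else 1)]
    return args

def ensure_arg(args, arg, param):
    # Re-add `arg` with the given parameter, replacing any prior setting.
    args = remove_arg(args, arg, has_param=True)
    return args + [arg, param]

args = ['-default-parallel', '-parallel-threads', '4']
args = remove_arg(args, '-default-parallel')
args = ensure_arg(args, '-default-concurrency', param='SERIAL')
assert args == ['-parallel-threads', '4', '-default-concurrency', 'SERIAL']
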
Beispiel #43
0
 def get_for_targets(self, targets):
     """Gets the union of the products for the given targets, preserving the input order."""
     products = OrderedSet()
     for target in targets:
         products.update(self._products_by_target[target])
     return products
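
get_for_targets above leans on two OrderedSet properties: deduplication and preservation of first-insertion order. A tiny stand-in built on dicts (which preserve insertion order on Python 3.7+); the real class ships with pants:

class OrderedSet(object):
    def __init__(self, items=()):
        self._items = dict.fromkeys(items)  # dict keys act as an ordered set

    def update(self, items):
        # Duplicates keep their original position; new items append at the end.
        self._items.update(dict.fromkeys(items))

    def __iter__(self):
        return iter(self._items)

products = OrderedSet()
products.update(['a.jar', 'b.jar'])
products.update(['b.jar', 'c.jar'])
assert list(products) == ['a.jar', 'b.jar', 'c.jar']
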
Beispiel #44
0
    def run_tests(self, fail_fast, test_targets, output_dir, coverage):
        test_registry = self._collect_test_targets(test_targets)
        if test_registry.empty:
            return TestResult.successful

        coverage.instrument(output_dir)

        def parse_error_handler(parse_error):
            # Just log and move on since the result is only used to characterize failures, and raising
            # an error here would just distract from the underlying test failures.
            self.context.log.error(
                'Error parsing test result file {path}: {cause}'.format(
                    path=parse_error.xml_path, cause=parse_error.cause))

        # The 'instrument_classpath' product below will be `None` if not set, and we'll default
        # back to runtime_classpath.
        classpath_product = self.context.products.get_data(
            'instrument_classpath')

        result = 0
        for batch_id, (properties,
                       batch) in enumerate(self._iter_batches(test_registry)):
            (workdir, platform, target_jvm_options, target_env_vars,
             concurrency, threads) = properties

            batch_output_dir = output_dir
            if self._batched:
                batch_output_dir = os.path.join(batch_output_dir,
                                                'batch-{}'.format(batch_id))

            run_modifications = coverage.run_modifications(batch_output_dir)

            extra_jvm_options = run_modifications.extra_jvm_options

            # Batches of test classes will likely exist within the same targets: dedupe them.
            relevant_targets = {
                test_registry.get_owning_target(t)
                for t in batch
            }

            complete_classpath = OrderedSet()
            complete_classpath.update(run_modifications.classpath_prepend)
            complete_classpath.update(JUnit.global_instance().runner_classpath(
                self.context))
            complete_classpath.update(
                self.classpath(relevant_targets,
                               classpath_product=classpath_product))

            distribution = JvmPlatform.preferred_jvm_distribution(
                [platform], self._strict_jvm_version)

            # Override cmdline args with values from junit_test() target that specify concurrency:
            args = self._args(fail_fast, batch_output_dir) + [u'-xmlreport']

            if concurrency is not None:
                args = remove_arg(args, '-default-parallel')
                if concurrency == JUnitTests.CONCURRENCY_SERIAL:
                    args = ensure_arg(args,
                                      '-default-concurrency',
                                      param='SERIAL')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES:
                    args = ensure_arg(args,
                                      '-default-concurrency',
                                      param='PARALLEL_CLASSES')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_METHODS:
                    args = ensure_arg(args,
                                      '-default-concurrency',
                                      param='PARALLEL_METHODS')
                elif concurrency == JUnitTests.CONCURRENCY_PARALLEL_CLASSES_AND_METHODS:
                    args = ensure_arg(args,
                                      '-default-concurrency',
                                      param='PARALLEL_CLASSES_AND_METHODS')

            if threads is not None:
                args = remove_arg(args, '-parallel-threads', has_param=True)
                args += ['-parallel-threads', str(threads)]

            batch_test_specs = [test.render_test_spec() for test in batch]
            with argfile.safe_args(batch_test_specs,
                                   self.get_options()) as batch_tests:
                with self.chroot(relevant_targets, workdir) as chroot:
                    self.context.log.debug('CWD = {}'.format(chroot))
                    self.context.log.debug('platform = {}'.format(platform))
                    with environment_as(**dict(target_env_vars)):
                        subprocess_result = self.spawn_and_wait(
                            executor=SubprocessExecutor(distribution),
                            distribution=distribution,
                            classpath=complete_classpath,
                            main=JUnit.RUNNER_MAIN,
                            jvm_options=self.jvm_options + extra_jvm_options +
                            list(target_jvm_options),
                            args=args + batch_tests,
                            workunit_factory=self.context.new_workunit,
                            workunit_name='run',
                            workunit_labels=[WorkUnitLabel.TEST],
                            cwd=chroot,
                            synthetic_jar_dir=batch_output_dir,
                            create_synthetic_jar=self.synthetic_classpath,
                        )
                        self.context.log.debug(
                            'JUnit subprocess exited with result ({})'.format(
                                subprocess_result))
                        result += abs(subprocess_result)

                tests_info = self.parse_test_info(batch_output_dir,
                                                  parse_error_handler,
                                                  ['classname'])
                for test_name, test_info in tests_info.items():
                    test_item = Test(test_info['classname'], test_name)
                    test_target = test_registry.get_owning_target(test_item)
                    self.report_all_info_for_single_test(
                        self.options_scope, test_target, test_name, test_info)

                if result != 0 and fail_fast:
                    break

        if result == 0:
            return TestResult.successful

        target_to_failed_test = parse_failed_targets(test_registry, output_dir,
                                                     parse_error_handler)

        def sort_owning_target(t):
            return t.address.spec if t else None

        failed_targets = sorted(target_to_failed_test, key=sort_owning_target)
        error_message_lines = []
        if self._failure_summary:

            def render_owning_target(t):
                return t.address.reference() if t else '<Unknown Target>'

            for target in failed_targets:
                error_message_lines.append('\n{indent}{owner}'.format(
                    indent=' ' * 4, owner=render_owning_target(target)))
                for test in sorted(target_to_failed_test[target]):
                    error_message_lines.append(
                        '{indent}{classname}#{methodname}'.format(
                            indent=' ' * 8,
                            classname=test.classname,
                            methodname=test.methodname))
        error_message_lines.append(
            '\njava {main} ... exited non-zero ({code}); {failed} failed {targets}.'
            .format(main=JUnit.RUNNER_MAIN,
                    code=result,
                    failed=len(failed_targets),
                    targets=pluralize(len(failed_targets), 'target')))
        return TestResult(msg='\n'.join(error_message_lines),
                          rc=result,
                          failed_targets=failed_targets)
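
environment_as above scopes a target's extra_env_vars to just the subprocess launch. A self-contained sketch of that save/override/restore context-manager pattern (the variable name is illustrative, not anything pants sets):

import os
from contextlib import contextmanager

@contextmanager
def environment_as(**kwargs):
    # Save current values, apply the overrides, and restore on exit.
    saved = {key: os.environ.get(key) for key in kwargs}
    os.environ.update({key: str(value) for key, value in kwargs.items()})
    try:
        yield
    finally:
        for key, value in saved.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value

with environment_as(PANTS_EXAMPLE_VAR='-Xmx1g'):
    assert os.environ['PANTS_EXAMPLE_VAR'] == '-Xmx1g'
assert 'PANTS_EXAMPLE_VAR' not in os.environ
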
Beispiel #45
0
 def closure(self):
     result = OrderedSet()
     for target in self.dependencies | self.internal_dependencies:
         result.update(target.closure())
     return result
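
A self-contained variant of the recursive closure above, over a toy graph of string targets. This sketch includes the starting node itself and uses a dict for ordered dedupe; like the original, it assumes the graph is acyclic:

def closure(node, graph):
    result = dict.fromkeys([node])  # ordered, deduped
    for dep in graph.get(node, ()):
        result.update(dict.fromkeys(closure(dep, graph)))
    return list(result)

graph = {'app': ['lib'], 'lib': ['util'], 'util': []}
assert closure('app', graph) == ['app', 'lib', 'util']
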
Beispiel #46
0
 def resolve_deps(key):
   deps = OrderedSet()
   for dep in context.config.getlist('protobuf-gen', key):
     deps.update(context.resolve(dep))
   return deps
Beispiel #47
0
    def bundle(self, app):
        """Create a self-contained application bundle.

    The bundle will contain the target classes, dependencies and resources.
    """

        assert isinstance(app, BundleCreate.App)

        bundle_dir = os.path.join(self.get_options().pants_distdir,
                                  '{}-bundle'.format(app.basename))
        self.context.log.info('creating {}'.format(
            os.path.relpath(bundle_dir, get_buildroot())))

        safe_mkdir(bundle_dir, clean=True)

        classpath = OrderedSet()

        # Create symlinks for both internal and external dependencies under `lib_dir`. This is
        # only needed when not creating a deployjar
        lib_dir = os.path.join(bundle_dir, self.LIBS_DIR)
        if not self.get_options().deployjar:
            os.mkdir(lib_dir)
            runtime_classpath = self.context.products.get_data(
                'runtime_classpath')
            classpath.update(
                ClasspathUtil.create_canonical_classpath(
                    runtime_classpath,
                    app.target.closure(bfs=True,
                                       **self._target_closure_kwargs),
                    lib_dir,
                    internal_classpath_only=False,
                    excludes=app.binary.deploy_excludes,
                ))

        bundle_jar = os.path.join(bundle_dir,
                                  '{}.jar'.format(app.binary.basename))
        with self.monolithic_jar(app.binary,
                                 bundle_jar,
                                 manifest_classpath=classpath) as jar:
            self.add_main_manifest_entry(jar, app.binary)

            # Make classpath complete by adding the monolithic jar.
            classpath.update([jar.path])

        if app.binary.shading_rules:
            for jar_path in classpath:
                # In case `jar_path` is a symlink this is still safe: the shaded jar overwrites
                # the symlink, while the original file the symlink pointed to remains untouched.
                # TODO: run the shading in parallel to speed this up.
                self.shade_jar(shading_rules=app.binary.shading_rules,
                               jar_path=jar_path)

        for bundle in app.bundles:
            for path, relpath in bundle.filemap.items():
                bundle_path = os.path.join(bundle_dir, relpath)
                if not os.path.exists(path):
                    raise TaskError(
                        'Given path: {} does not exist in target {}'.format(
                            path, app.address.spec))
                safe_mkdir(os.path.dirname(bundle_path))
                os.symlink(path, bundle_path)

        return bundle_dir
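
The lib_dir population above materializes the runtime classpath as a directory of symlinks next to the bundle jar. A minimal sketch of that symlink-farm idea (paths and naming are illustrative; on POSIX, os.symlink happily creates a link to a source that does not exist yet, and Windows may require extra privileges):

import os
import tempfile

def link_classpath(entries, lib_dir):
    # Create one stable, readable symlink per classpath entry under lib_dir.
    os.makedirs(lib_dir, exist_ok=True)
    links = []
    for i, entry in enumerate(entries):
        link = os.path.join(lib_dir, '{}-{}'.format(i, os.path.basename(entry)))
        os.symlink(entry, link)
        links.append(link)
    return links

tmp = tempfile.mkdtemp()
links = link_classpath(['/repo/3rdparty/guava.jar'], os.path.join(tmp, 'libs'))
assert os.path.islink(links[0])
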
Beispiel #48
0
 def configurations(self):
     confs = OrderedSet(self._configurations)
     confs.update(artifact.conf for artifact in self.artifacts
                  if artifact.conf)
     return list(confs)
Beispiel #49
0
 def all_imported_jar_deps(self):
   jar_deps = OrderedSet()
   for jar_lib in self.imported_targets:
     jar_deps.update(jar_lib.jar_dependencies)
   return list(jar_deps)
Beispiel #50
0
        def process_target(current_target):
            """
      :type current_target: pants.build_graph.target.Target
      """
            def get_target_type(tgt):
                def is_test(t):
                    return isinstance(t, (JUnitTests, PythonTests))

                if is_test(tgt):
                    return ExportTask.SourceRootTypes.TEST
                else:
                    if (isinstance(tgt, Resources)
                            and tgt in resource_target_map
                            and is_test(resource_target_map[tgt])):
                        return ExportTask.SourceRootTypes.TEST_RESOURCE
                    elif isinstance(tgt, Resources):
                        return ExportTask.SourceRootTypes.RESOURCE
                    else:
                        return ExportTask.SourceRootTypes.SOURCE

            info = {
                'targets': [],
                'libraries': [],
                'roots': [],
                'id': current_target.id,
                'target_type': get_target_type(current_target),
                # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
                'is_code_gen': current_target.is_synthetic,
                'is_synthetic': current_target.is_synthetic,
                'pants_target_type': self._get_pants_target_alias(type(current_target)),
            }

            if not current_target.is_synthetic:
                info['globs'] = current_target.globs_relative_to_buildroot()
                if self.get_options().sources:
                    info['sources'] = list(
                        current_target.sources_relative_to_buildroot())

            info['transitive'] = current_target.transitive
            info['scope'] = str(current_target.scope)
            info['is_target_root'] = current_target in target_roots_set

            if isinstance(current_target, PythonRequirementLibrary):
                reqs = current_target.payload.get_field_value(
                    'requirements', set())
                """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
                info['requirements'] = [req.key for req in reqs]

            if isinstance(current_target, PythonTarget):
                interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
                    [current_target])
                if interpreter_for_target is None:
                    raise TaskError(
                        'Unable to find suitable interpreter for {}'.format(
                            current_target.address))
                python_interpreter_targets_mapping[
                    interpreter_for_target].append(current_target)
                info['python_interpreter'] = str(
                    interpreter_for_target.identity)

            def iter_transitive_jars(jar_lib):
                """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of
                :class:`pants.java.jar.M2Coordinate`
        """
                if classpath_products:
                    jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                        (jar_lib, ))
                    for _, jar_entry in jar_products:
                        coordinate = jar_entry.coordinate
                        # We drop classifier and type_ since those fields are represented in the global
                        # libraries dict and here we just want the key into that dict (see `_jar_id`).
                        yield M2Coordinate(org=coordinate.org,
                                           name=coordinate.name,
                                           rev=coordinate.rev)

            target_libraries = OrderedSet()
            if isinstance(current_target, JarLibrary):
                target_libraries = OrderedSet(
                    iter_transitive_jars(current_target))
            for dep in current_target.dependencies:
                info['targets'].append(dep.address.spec)
                if isinstance(dep, JarLibrary):
                    for jar in dep.jar_dependencies:
                        target_libraries.add(
                            M2Coordinate(jar.org, jar.name, jar.rev))
                    # Add all the jars pulled in by this jar_library
                    target_libraries.update(iter_transitive_jars(dep))
                if isinstance(dep, Resources):
                    resource_target_map[dep] = current_target

            if isinstance(current_target, ScalaLibrary):
                for dep in current_target.java_sources:
                    info['targets'].append(dep.address.spec)
                    process_target(dep)

            if isinstance(current_target, JvmTarget):
                info['excludes'] = [
                    self._exclude_id(exclude)
                    for exclude in current_target.excludes
                ]
                info['platform'] = current_target.platform.name
                if hasattr(current_target, 'test_platform'):
                    info['test_platform'] = current_target.test_platform.name

            info['roots'] = [{
                'source_root': source_root_package_prefix[0],
                'package_prefix': source_root_package_prefix[1]
            } for source_root_package_prefix in self._source_roots_for_target(
                current_target)]

            if classpath_products:
                info['libraries'] = [
                    self._jar_id(lib) for lib in target_libraries
                ]
            targets_map[current_target.address.spec] = info
Beispiel #51
0
 def get_all_deps():
   all_deps = OrderedSet()
   all_deps.update(Pants('3rdparty/python:antlr-%s' % antlr_version).resolve())
   if dependencies:
     all_deps.update(dependencies)
   return all_deps
Beispiel #52
0
class InternalTarget(Target):
    """A baseclass for targets that support an optional dependency set."""
    @classmethod
    def check_cycles(cls, internal_target):
        """Validates the given InternalTarget has no circular dependencies.  Raises CycleException if
    it does."""

        dep_stack = OrderedSet()

        def descend(internal_dep):
            if internal_dep in dep_stack:
                raise CycleException(dep_stack, internal_dep)
            if hasattr(internal_dep, 'internal_dependencies'):
                dep_stack.add(internal_dep)
                for dep in internal_dep.internal_dependencies:
                    descend(dep)
                dep_stack.remove(internal_dep)

        descend(internal_target)

    @classmethod
    def sort_targets(cls, internal_targets):
        """Returns a list of targets that internal_targets depend on sorted from most dependent to
    least."""

        roots = OrderedSet()
        inverted_deps = collections.defaultdict(
            OrderedSet)  # target -> dependent targets
        visited = set()

        def invert(target):
            if target not in visited:
                visited.add(target)
                if target.internal_dependencies:
                    for internal_dependency in target.internal_dependencies:
                        if isinstance(internal_dependency, InternalTarget):
                            inverted_deps[internal_dependency].add(target)
                            invert(internal_dependency)
                else:
                    roots.add(target)

        for internal_target in internal_targets:
            invert(internal_target)

        sorted_targets = []
        visited.clear()

        def topological_sort(target):
            if target not in visited:
                visited.add(target)
                if target in inverted_deps:
                    for dep in inverted_deps[target]:
                        topological_sort(dep)
                sorted_targets.append(target)

        for root in roots:
            topological_sort(root)

        return sorted_targets

    @classmethod
    def coalesce_targets(cls, internal_targets, discriminator):
        """Returns a list of targets internal_targets depend on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        sorted_targets = InternalTarget.sort_targets(internal_targets)

        # can do no better for any of these:
        # []
        # [a]
        # [a,b]
        if len(sorted_targets) <= 2:
            return sorted_targets

        # For these, we'd like to coalesce if possible, like:
        # [a,b,a,c,a,c] -> [a,a,a,b,c,c]
        # We adopt a quadratic worst-case solution: when we find a type-change edge, scan forward
        # for the opposite edge and then try to swap dependency pairs to move the type back left
        # to its grouping.  If the leftwards migration fails due to a dependency constraint, we
        # just stop and move on, leaving "type islands".
        current_type = None

        # Main scan left to right, no backtracking.
        for i in range(len(sorted_targets) - 1):
            current_target = sorted_targets[i]
            if current_type != discriminator(current_target):
                scanned_back = False

                # scan ahead for next type match
                for j in range(i + 1, len(sorted_targets)):
                    look_ahead_target = sorted_targets[j]
                    if current_type == discriminator(look_ahead_target):
                        scanned_back = True

                        # Swap this one as far back as we can.
                        for k in range(j, i, -1):
                            previous_target = sorted_targets[k - 1]
                            mismatching_types = current_type != discriminator(
                                previous_target)
                            not_a_dependency = look_ahead_target not in previous_target.internal_dependencies
                            if mismatching_types and not_a_dependency:
                                sorted_targets[k] = sorted_targets[k - 1]
                                sorted_targets[k - 1] = look_ahead_target
                            else:
                                break  # out of k

                        break  # out of j

                if not scanned_back:  # done with coalescing the current type, move on to next
                    current_type = discriminator(current_target)

        return sorted_targets

    def sort(self):
        """Returns a list of targets this target depends on sorted from most dependent to least."""

        return InternalTarget.sort_targets([self])

    def coalesce(self, discriminator):
        """Returns a list of targets this target depends on sorted from most dependent to least and
    grouped where possible by target type as categorized by the given discriminator."""

        return InternalTarget.coalesce_targets([self], discriminator)

    def __init__(self, name, dependencies, is_meta):
        Target.__init__(self, name, is_meta)

        self.resolved_dependencies = OrderedSet()
        self.internal_dependencies = OrderedSet()
        self.jar_dependencies = OrderedSet()

        self.update_dependencies(dependencies)

    def update_dependencies(self, dependencies):
        if dependencies:
            for dependency in dependencies:
                for resolved_dependency in dependency.resolve():
                    self.resolved_dependencies.add(resolved_dependency)
                    if isinstance(resolved_dependency, InternalTarget):
                        self.internal_dependencies.add(resolved_dependency)
                    self.jar_dependencies.update(
                        resolved_dependency._as_jar_dependencies())

    def _walk(self, walked, work, predicate=None):
        Target._walk(self, walked, work, predicate)
        for dep in self.resolved_dependencies:
            if isinstance(dep, Target) and dep not in walked:
                walked.add(dep)
                if not predicate or predicate(dep):
                    additional_targets = work(dep)
                    dep._walk(walked, work, predicate)
                    if additional_targets:
                        for additional_target in additional_targets:
                            additional_target._walk(walked, work, predicate)
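
sort_targets above works in two passes: invert the dependency edges while collecting leaf "roots", then depth-first walk the inverted edges so dependents are emitted before their dependencies. A self-contained sketch on a toy string graph:

from collections import defaultdict

def sort_most_dependent_first(graph):
    roots, inverted, visited = [], defaultdict(list), set()

    def invert(node):
        if node in visited:
            return
        visited.add(node)
        deps = graph.get(node, [])
        if not deps:
            roots.append(node)  # a leaf: nothing it depends on
        for dep in deps:
            inverted[dep].append(node)  # dep -> nodes that depend on it
            invert(dep)

    for node in graph:
        invert(node)

    ordered, seen = [], set()

    def topo(node):
        if node in seen:
            return
        seen.add(node)
        for dependent in inverted[node]:
            topo(dependent)  # emit dependents before the node itself
        ordered.append(node)

    for root in roots:
        topo(root)
    return ordered

assert sort_most_dependent_first({'app': ['lib'], 'lib': []}) == ['app', 'lib']
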
Beispiel #53
0
 def calculate_tests(self, targets):
     tests = OrderedSet()
     for target in targets:
         if target.is_scala and target.is_test:
             tests.update(target.sources_relative_to_buildroot())
     return tests
Beispiel #54
0
  def wrapper(func):
    if not inspect.isfunction(func):
      raise ValueError('The @rule decorator must be applied innermost of all decorators.')

    caller_frame = inspect.stack()[1][0]
    source = inspect.getsource(func)
    beginning_indent = _get_starting_indent(source)
    if beginning_indent:
      source = "\n".join(line[beginning_indent:] for line in source.split("\n"))
    module_ast = ast.parse(source)

    def resolve_type(name):
      resolved = caller_frame.f_globals.get(name) or caller_frame.f_builtins.get(name)
      if not isinstance(resolved, type):
        raise ValueError('Expected a `type` constructor, but got: {}'.format(name))
      return resolved

    gets = OrderedSet()
    rule_func_node = assert_single_element(
      node for node in ast.iter_child_nodes(module_ast)
      if isinstance(node, ast.FunctionDef) and node.name == func.__name__)

    parents_table = {}
    for parent in ast.walk(rule_func_node):
      for child in ast.iter_child_nodes(parent):
        parents_table[child] = parent

    rule_visitor = _RuleVisitor(
      func=func,
      func_node=rule_func_node,
      func_source=source,
      orig_indent=beginning_indent,
      frame=caller_frame,
      parents_table=parents_table,
    )
    rule_visitor.visit(rule_func_node)
    gets.update(
      Get.create_statically_for_rule_graph(resolve_type(p), resolve_type(s))
      for p, s in rule_visitor.gets)

    # For @console_rule, redefine the function to avoid needing a literal return of the output type.
    if for_goal:
      def goal_and_return(*args, **kwargs):
        res = func(*args, **kwargs)
        if isinstance(res, GeneratorType):
          # Return a generator with an output_type instance appended.
          return _terminated(res, output_type())
        elif res is not None:
          raise Exception('A @console_rule should not have a return value.')
        return output_type()
      functools.update_wrapper(goal_and_return, func)
      wrapped_func = goal_and_return
    else:
      wrapped_func = func

    wrapped_func.rule = TaskRule(
        output_type,
        tuple(input_selectors),
        wrapped_func,
        input_gets=tuple(gets),
        goal=for_goal,
        cacheable=cacheable,
      )

    return wrapped_func
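
The decorator above recovers Get(...) usages by parsing the decorated function's own source. A minimal, self-contained sketch of that AST-introspection technique; find_calls and the Get stand-in are illustrative, not the engine's API:

import ast
import inspect
import textwrap

def find_calls(func, call_name):
    # Parse the function's source and collect every call to `call_name` by name.
    source = textwrap.dedent(inspect.getsource(func))
    tree = ast.parse(source)
    return [node for node in ast.walk(tree)
            if isinstance(node, ast.Call)
            and isinstance(node.func, ast.Name)
            and node.func.id == call_name]

def Get(product, subject):  # stand-in, not the engine's Get
    return (product, subject)

def my_rule():
    return Get(str, int)

assert len(find_calls(my_rule, 'Get')) == 1
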
Beispiel #55
0
class Context(object):
    """Contains the context for a single run of pants.

  Goal implementations can access configuration data from pants.ini and any flags they have exposed
  here as well as information about the targets involved in the run.

  Advanced uses of the context include adding new targets to it for upstream or downstream goals to
  operate on and mapping of products a goal creates to the targets the products are associated with.
  """
    class Log(object):
        """A logger facade that logs into the pants reporting framework."""
        def __init__(self, run_tracker):
            self._run_tracker = run_tracker

        def debug(self, *msg_elements):
            self._run_tracker.log(Report.DEBUG, *msg_elements)

        def info(self, *msg_elements):
            self._run_tracker.log(Report.INFO, *msg_elements)

        def warn(self, *msg_elements):
            self._run_tracker.log(Report.WARN, *msg_elements)

        def error(self, *msg_elements):
            self._run_tracker.log(Report.ERROR, *msg_elements)

        def fatal(self, *msg_elements):
            self._run_tracker.log(Report.FATAL, *msg_elements)

    def __init__(self,
                 config,
                 options,
                 run_tracker,
                 target_roots,
                 requested_goals=None,
                 lock=Lock.unlocked(),
                 log=None,
                 target_base=None):
        self._config = config
        self._options = options
        self.run_tracker = run_tracker
        self._lock = lock
        self._log = log or Context.Log(run_tracker)
        self._target_base = target_base or Target
        self._state = {}
        self._products = Products()
        self._buildroot = get_buildroot()
        self.requested_goals = requested_goals or []

        self.replace_targets(target_roots)

    @property
    def config(self):
        """Returns a Config object containing the configuration data found in pants.ini."""
        return self._config

    @property
    def options(self):
        """Returns the command line options parsed at startup."""
        return self._options

    @property
    def lock(self):
        """Returns the global pants run lock so a goal can release it if needed."""
        return self._lock

    @property
    def log(self):
        """Returns the preferred logger for goals to use."""
        return self._log

    @property
    def products(self):
        """Returns the Products manager for the current run."""
        return self._products

    @property
    def target_roots(self):
        """Returns the targets specified on the command line.

    This set is strictly a subset of all targets in play for the run as returned by self.targets().
    Note that for a command line invocation that uses wildcard selectors : or ::, the targets
    globbed by the wildcards are considered to be target roots.
    """
        return self._target_roots

    def __str__(self):
        return 'Context(id:%s, state:%s, targets:%s)' % (self.id, self.state,
                                                         self.targets())

    @contextmanager
    def new_workunit(self, name, labels=list(), cmd=''):
        with self.run_tracker.new_workunit(name=name, labels=labels,
                                           cmd=cmd) as workunit:
            yield workunit

    def acquire_lock(self):
        """ Acquire the global lock for the root directory associated with this context. When
    a goal requires serialization, it will call this to acquire the lock.
    """
        def onwait(pid):
            print('Waiting on pants process %s to complete' %
                  _process_info(pid),
                  file=sys.stderr)
            return True

        if self._lock.is_unlocked():
            runfile = os.path.join(self._buildroot, '.pants.run')
            self._lock = Lock.acquire(runfile, onwait=onwait)

    def release_lock(self):
        """Release the global lock if it's held.
    Returns True if the lock was held before this call.
    """
        if self._lock.is_unlocked():
            return False
        else:
            self._lock.release()
            self._lock = Lock.unlocked()
            return True

    def is_unlocked(self):
        """Whether the global lock object is actively holding the lock."""
        return self._lock.is_unlocked()

    def replace_targets(self, target_roots):
        """Replaces all targets in the context with the given roots and their transitive
    dependencies.
    """
        self._target_roots = list(target_roots)

        self._targets = OrderedSet()
        for target in self._target_roots:
            self.add_target(target)
        self.id = Target.identify(self._targets)

    def add_target(self, target):
        """Adds a target and its transitive dependencies to the run context.

    The target is not added to the target roots.
    """
        def add_targets(tgt):
            self._targets.update(tgt for tgt in tgt.resolve()
                                 if isinstance(tgt, self._target_base))

        target.walk(add_targets)

    def add_new_target(self, target_base, target_type, *args, **kwargs):
        """Creates a new target, adds it to the context and returns it.

    This method ensures the target resolves files against the given target_base, creating the
    directory if needed and registering a source root.
    """
        if 'derived_from' in kwargs:
            derived_from = kwargs.get('derived_from')
            del kwargs['derived_from']
        else:
            derived_from = None
        target = self._create_new_target(target_base, target_type, *args,
                                         **kwargs)
        self.add_target(target)
        if derived_from:
            target.derived_from = derived_from
        return target

    def _create_new_target(self, target_base, target_type, *args, **kwargs):
        if not os.path.exists(target_base):
            os.makedirs(target_base)
        SourceRoot.register(target_base, target_type)
        with ParseContext.temp(target_base):
            return target_type(*args, **kwargs)

    def remove_target(self, target):
        """Removes the given Target object from the context completely if present."""
        if target in self.target_roots:
            self.target_roots.remove(target)
        self._targets.discard(target)

    def targets(self, predicate=None):
        """Selects targets in-play in this run from the target roots and their transitive dependencies.

    If specified, the predicate will be used to narrow the scope of targets returned.
    """
        return filter(predicate, self._targets)

    def dependents(self, on_predicate=None, from_predicate=None):
        """Returns  a map from targets that satisfy the from_predicate to targets they depend on that
      satisfy the on_predicate.
    """
        core = set(self.targets(on_predicate))
        dependees = defaultdict(set)
        for target in self.targets(from_predicate):
            if hasattr(target, 'dependencies'):
                for dependency in target.dependencies:
                    if dependency in core:
                        dependees[target].add(dependency)
        return dependees

    def resolve(self, spec):
        """Returns an iterator over the target(s) the given address points to."""
        with ParseContext.temp():
            return Pants(spec).resolve()

    @contextmanager
    def state(self, key, default=None):
        value = self._state.get(key, default)
        yield value
        self._state[key] = value

    @contextmanager
    def timing(self, label):
        if self.timer:
            with self.timer.timing(label):
                yield
        else:
            yield
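
Context.state above hands callers a mutable slot: the current value is yielded, and the object is stored back when the block exits, so in-place mutation persists while rebinding the name inside the block does not. A self-contained sketch:

from contextlib import contextmanager

class State(object):
    def __init__(self):
        self._state = {}

    @contextmanager
    def state(self, key, default=None):
        value = self._state.get(key, default)
        yield value
        self._state[key] = value  # persists in-place mutations of `value`

s = State()
with s.state('seen', default=[]) as seen:
    seen.append('target-a')
with s.state('seen', default=[]) as seen:
    assert seen == ['target-a']
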
Beispiel #56
0
 def synthetic_target_extra_dependencies(self, target, target_workdir):
     deps = OrderedSet(self._thrift_dependencies_for_target(target))
     deps.update(target.dependencies)
     return deps
Beispiel #57
0
  def execute(self, targets):
    """Resolves the specified confs for the configured targets and returns an iterator over
    tuples of (conf, jar path).
    """
    def dirname_for_requested_targets(targets):
      """Where we put the classpath file for this set of targets."""
      sha = hashlib.sha1()
      for t in targets:
        sha.update(t.id)
      return sha.hexdigest()

    def is_classpath(target):
      return is_jar(target) or (
        is_internal(target) and any(jar for jar in target.jar_dependencies if jar.rev)
      )

    groups = self.context.products.get_data('exclusives_groups')

    # Below, need to take the code that actually execs ivy, and invoke it once for each
    # group. Then after running ivy, we need to take the resulting classpath, and load it into
    # the build products.

    # The set of groups we need to consider is complicated:
    # - If there are no conflicting exclusives (ie, there's only one entry in the map),
    #   then we just do the one.
    # - If there are conflicts, then there will be at least three entries in the groups map:
    #   - the group with no exclusives (X)
    #   - the two groups that are in conflict (A and B).
    # In the latter case, we need to do the resolve twice: Once for A+X, and once for B+X,
    # because things in A and B can depend on things in X; and so they can indirectly depend
    # on the dependencies of X. (I think this will be covered by the computed transitive
    # dependencies of A and B. But before pushing this change, review this comment, and make
    # sure that this is working correctly.)
    for group_key in groups.get_group_keys():
      # Narrow the groups target set to just the set of targets that we're supposed to build.
      # Normally, this shouldn't be different from the contents of the group.
      group_targets = groups.get_targets_for_group_key(group_key) & set(targets)

      classpath_targets = OrderedSet()
      for target in group_targets:
        classpath_targets.update(filter(is_classpath, filter(is_concrete, target.resolve())))

      target_workdir = os.path.join(self._work_dir, dirname_for_requested_targets(group_targets))
      target_classpath_file = os.path.join(target_workdir, 'classpath')
      with self.invalidated(classpath_targets, only_buildfiles=True,
                            invalidate_dependents=True) as invalidation_check:
        # Note that it's possible for all targets to be valid but for no classpath file to exist at
        # target_classpath_file, e.g., if we previously built a superset of targets.
        if invalidation_check.invalid_vts or not os.path.exists(target_classpath_file):
          self._exec_ivy(target_workdir, targets, [
            '-cachepath', target_classpath_file,
            '-confs'
          ] + self._confs)

      if not os.path.exists(target_classpath_file):
        print('Ivy failed to create classpath file at %s' % target_classpath_file)

      def safe_link(src, dest):
        if os.path.exists(dest):
          os.unlink(dest)
        os.symlink(src, dest)

      # TODO(benjy): Is this symlinking valid in the presence of multiple exclusives groups?
      # Should probably get rid of it and use a local artifact cache instead.
      # Symlink to the current classpath file.
      safe_link(target_classpath_file, self._classpath_file)

      # Symlink to the current ivy.xml file (useful for IDEs that read it).
      ivyxml_symlink = os.path.join(self._work_dir, 'ivy.xml')
      target_ivyxml = os.path.join(target_workdir, 'ivy.xml')
      safe_link(target_ivyxml, ivyxml_symlink)

      if os.path.exists(self._classpath_file):
        with self._cachepath(self._classpath_file) as classpath:
          for path in classpath:
            if self._map_jar(path):
              for conf in self._confs:
                groups.update_compatible_classpaths(group_key, [(conf, path.strip())])

    if self._report:
      self._generate_ivy_report()

    if self.context.products.isrequired("ivy_jar_products"):
      self._populate_ivy_jar_products()

    create_jardeps_for = self.context.products.isrequired(self._mapfor_typename())
    if create_jardeps_for:
      genmap = self.context.products.get(self._mapfor_typename())
      for target in filter(create_jardeps_for, targets):
        self._mapjars(genmap, target)
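
A toy sketch of the per-group resolve loop described in the comments above: with conflicting exclusives groups A and B plus the no-exclusives group X, each resolve covers one conflicting group merged with X. The group keys and target names here are hypothetical:

groups = {
  'A+X': {'lib-a', 'common'},  # group A merged with the no-exclusives group X
  'B+X': {'lib-b', 'common'},  # group B merged with X
}
requested = {'lib-a', 'common'}  # the targets this run was asked to build

for group_key, group_targets in groups.items():
  # Narrow each group to the requested targets, mirroring the `&` intersection above.
  to_resolve = group_targets & requested
  print('%s -> resolve %s' % (group_key, sorted(to_resolve)))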
Beispiel #58
0
    def wrapper(func):
        if not inspect.isfunction(func):
            raise ValueError(
                'The @rule decorator must be applied innermost of all decorators.'
            )

        owning_module = sys.modules[func.__module__]
        source = inspect.getsource(func)
        # Dedent the function source so ast.parse() accepts it even when the function
        # was defined inside a class or another function.
        beginning_indent = _get_starting_indent(source)
        if beginning_indent:
            source = "\n".join(line[beginning_indent:]
                               for line in source.split("\n"))
        module_ast = ast.parse(source)

        def resolve_type(name):
            resolved = getattr(owning_module, name,
                               None) or owning_module.__builtins__.get(
                                   name, None)
            if resolved is None:
                raise ValueError(
                    f'Could not resolve type `{name}` in top level of module {owning_module.__name__}'
                )
            elif not isinstance(resolved, type):
                raise ValueError(
                    f'Expected a `type` constructor for `{name}`, but got: {resolved} (type `{type(resolved).__name__}`)'
                )
            return resolved

        gets = OrderedSet()
        rule_func_node = assert_single_element(
            node for node in ast.iter_child_nodes(module_ast) if
            isinstance(node, ast.FunctionDef) and node.name == func.__name__)

        # ast nodes do not record their parents, so map each child node to its parent
        # up front for upward traversal by the visitor below.
        parents_table = {}
        for parent in ast.walk(rule_func_node):
            for child in ast.iter_child_nodes(parent):
                parents_table[child] = parent

        rule_visitor = _RuleVisitor(
            func=func,
            func_node=rule_func_node,
            func_source=source,
            orig_indent=beginning_indent,
            parents_table=parents_table,
        )
        rule_visitor.visit(rule_func_node)
        gets.update(
            Get.create_statically_for_rule_graph(resolve_type(p),
                                                 resolve_type(s))
            for p, s in rule_visitor.gets)

        # Register dependencies for @console_rule/Goal.
        if is_goal_cls:
            dependency_rules = (optionable_rule(output_type.Options), )
        else:
            dependency_rules = None

        func.rule = TaskRule(
            output_type,
            tuple(input_selectors),
            func,
            input_gets=tuple(gets),
            dependency_rules=dependency_rules,
            cacheable=cacheable,
        )

        return func
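
A standalone sketch of the parents-table technique used above: ast nodes carry no parent pointers, so one pre-order walk records each child's parent, after which a visitor can navigate upward. The sample source string is hypothetical:

import ast

source = "def f(x):\n    return x + 1\n"
tree = ast.parse(source)

parents_table = {}
for parent in ast.walk(tree):
    for child in ast.iter_child_nodes(parent):
        parents_table[child] = parent

# Walk upward from a Return node to its enclosing FunctionDef.
node = next(n for n in ast.walk(tree) if isinstance(n, ast.Return))
while not isinstance(node, ast.FunctionDef):
    node = parents_table[node]
print(node.name)  # -> f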
Beispiel #59
0
def create_javadeps():
  """Resolves the configured 'thriftstore-dml-gen' javadeps specs into targets."""
  gen_info = context.config.getlist('thriftstore-dml-gen', 'javadeps')
  deps = OrderedSet()
  for dep in gen_info:
    deps.update(context.resolve(dep))
  return deps
Beispiel #60
0
    def console_output(self, _):
        buildfiles = OrderedSet()
        if self._dependees_type:
            base_paths = OrderedSet()
            for dependees_type in self._dependees_type:
                # FIXME(pl): This should be a standard function provided by the plugin/BuildFileParser
                # machinery
                try:
                    # Try to do a fully qualified import 1st for filtering on custom types.
                    from_list, module, type_name = dependees_type.rsplit(
                        '.', 2)
                    module = __import__('%s.%s' % (from_list, module),
                                        fromlist=[from_list])
                    target_type = getattr(module, type_name)
                except (ImportError, ValueError):
                    # Fall back on pants provided target types.
                    if dependees_type not in pants.base.build_file_aliases.target_aliases:
                        raise TaskError('Invalid type name: %s' %
                                        dependees_type)
                    target_type = pants.base.build_file_aliases.target_aliases[
                        dependees_type]
                # Find the SourceRoot for the given input type
                base_paths.update(SourceRoot.roots(target_type))
            if not base_paths:
                raise TaskError(
                    'No SourceRoot set for any target type in %s.' %
                    self._dependees_type +
                    '\nPlease define a source root in BUILD file as:' +
                    '\n\tsource_root(\'<src-folder>\', %s)' %
                    ', '.join(self._dependees_type))
            for base_path in base_paths:
                buildfiles.update(
                    BuildFile.scan_buildfiles(get_buildroot(), base_path))
        else:
            buildfiles = BuildFile.scan_buildfiles(get_buildroot())

        build_graph = self.context.build_graph
        build_file_parser = self.context.build_file_parser

        dependees_by_target = defaultdict(set)
        for build_file in buildfiles:
            build_file_parser.parse_build_file(build_file)
            for address in build_file_parser.addresses_by_build_file[
                    build_file]:
                build_file_parser.inject_spec_closure_into_build_graph(
                    address.spec, build_graph)
            for address in build_file_parser.addresses_by_build_file[
                    build_file]:
                target = build_graph.get_target(address)
                # TODO(John Sirois): tighten up the notion of targets written down in a BUILD by a
                # user vs. targets created by pants at runtime.
                target = self.get_concrete_target(target)
                for dependency in target.dependencies:
                    dependency = self.get_concrete_target(dependency)
                    dependees_by_target[dependency].add(target)

        roots = set(self.context.target_roots)
        if self._closed:
            for root in roots:
                yield root.address.build_file_spec

        for dependant in self.get_dependants(dependees_by_target, roots):
            yield dependant.address.build_file_spec
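
A minimal sketch of what the get_dependants() walk over dependees_by_target could look like: a breadth-first traversal of the reverse-dependency edges built above. The helper is a guess at the real method's behavior, and the map contents are hypothetical:

from collections import defaultdict, deque

dependees_by_target = defaultdict(set)
dependees_by_target['base'].add('lib')  # lib depends on base
dependees_by_target['lib'].add('app')   # app depends on lib

def get_dependants(dependees_by_target, roots):
    visited, queue = set(roots), deque(roots)
    while queue:
        target = queue.popleft()
        for dependee in dependees_by_target[target]:
            if dependee not in visited:
                visited.add(dependee)
                queue.append(dependee)
                yield dependee

print(sorted(get_dependants(dependees_by_target, {'base'})))  # -> ['app', 'lib']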