Example #1
    def transitive_subgraph_of_addresses_bfs(self, addresses, predicate=None, leveled_predicate=None):
        """Returns the transitive dependency closure of `addresses` using BFS.

    :API: public

    :param list<Address> addresses: The closure of `addresses` will be walked.
    :param function predicate: If this parameter is not given, no Targets will be filtered
      out of the closure.  If it is given, any Target which fails the predicate will not be
      walked, nor will its dependencies.  Thus predicate effectively trims out any subgraph
      that would only be reachable through Targets that fail the predicate.
    :param function leveled_predicate: Behaves identically to predicate, but takes the depth of the
      target in the search tree as a second parameter, and it is checked just before a dependency is
      expanded.
    """
        ordered_closure = OrderedSet()
        # Use the DepthAgnosticWalk if we can, because DepthAwareWalk does a bit of extra work
        # that can slow things down by a few millis.
        walker = self.DepthAwareWalk if leveled_predicate else self.DepthAgnosticWalk
        walk = walker()
        to_walk = deque((0, addr) for addr in addresses)
        while to_walk:
            level, address = to_walk.popleft()
            target = self._target_by_address[address]
            if not walk.expand_once(target, level):
                continue
            if predicate and not predicate(target):
                continue
            if walk.do_work_once(target):
                ordered_closure.add(target)
            for addr in self._target_dependencies_by_address[address]:
                if not leveled_predicate or leveled_predicate(self._target_by_address[addr], level):
                    to_walk.append((level + 1, addr))
        return ordered_closure
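A minimal, self-contained sketch of the same BFS-closure pattern over a plain adjacency dict (a toy stand-in for the build graph, not the pants API); as above, `predicate` trims an entire subgraph when a node fails it:

from collections import deque

def bfs_closure(roots, deps, predicate=None):
    """Walk `roots` transitively in BFS order, skipping nodes that fail `predicate`."""
    closure, seen = [], set()
    to_walk = deque((0, r) for r in roots)
    while to_walk:
        level, node = to_walk.popleft()
        if node in seen:
            continue  # Stands in for the walk.expand_once/do_work_once bookkeeping.
        seen.add(node)
        if predicate and not predicate(node):
            continue  # Neither the node nor its dependencies are visited.
        closure.append(node)
        to_walk.extend((level + 1, d) for d in deps.get(node, ()))
    return closure

deps = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}
assert bfs_closure(['a'], deps) == ['a', 'b', 'c', 'd']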
Example #2
  def parse_args(args):
    goals = OrderedSet()
    specs = OrderedSet()
    help = False
    explicit_multi = False

    def is_spec(spec):
      return os.sep in spec or ':' in spec

    for i, arg in enumerate(args):
      help = help or 'help' == arg
      if not arg.startswith('-'):
        specs.add(arg) if is_spec(arg) else goals.add(arg)
      elif '--' == arg:
        if specs:
          raise GoalError('Cannot intermix targets with goals when using --. Targets should '
                          'appear on the right')
        explicit_multi = True
        del args[i]
        break

    if explicit_multi:
      spec_offset = len(goals) + 1 if help else len(goals)
      specs.update(arg for arg in args[spec_offset:] if not arg.startswith('-'))

    return goals, specs
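A hedged usage sketch for the parser above (argument values are illustrative; requires the OrderedSet and GoalError imports of the original module): anything containing a path separator or `:` becomes a target spec, everything else before `--` a goal.

goals, specs = parse_args(['compile', 'test', 'src/java/com/foo:bar'])
assert list(goals) == ['compile', 'test']
assert list(specs) == ['src/java/com/foo:bar']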
Example #3
    def add_resolved_requirements(self, reqs, platforms=None):
        """Multi-platform dependency resolution for PEX files.

    :param reqs: A list of :class:`PythonRequirement` to resolve.
    :param platforms: A list of :class:`Platform`s to resolve requirements for.
                      Defaults to the platforms specified by PythonSetup.
    """
        deduped_reqs = OrderedSet(reqs)
        find_links = OrderedSet()
        for req in deduped_reqs:
            self._log.debug('  Dumping requirement: {}'.format(req))
            self._builder.add_requirement(req.requirement)
            if req.repository:
                find_links.add(req.repository)

        # Resolve the requirements into distributions.
        distributions = self._resolve_multi(self._builder.interpreter,
                                            deduped_reqs, platforms,
                                            find_links)
        locations = set()
        for platform, dists in distributions.items():
            for dist in dists:
                if dist.location not in locations:
                    self._log.debug('  Dumping distribution: .../{}'.format(
                        os.path.basename(dist.location)))
                    self._builder.add_distribution(dist)
                locations.add(dist.location)
Example #4
  def _register(cls, source_root_dir, *allowed_target_types):
    """Registers a source root.

    :param string source_root_dir: The source root directory against which we resolve source paths,
                     relative to the build root.
    :param list allowed_target_types: Optional list of target types. If specified, we enforce that
                          only targets of those types appear under this source root.
    """
    # Verify that source_root_dir doesn't reach outside buildroot.
    buildroot = os.path.normpath(get_buildroot())
    if source_root_dir.startswith(buildroot):
      abspath = os.path.normpath(source_root_dir)
    else:
      abspath = os.path.normpath(os.path.join(buildroot, source_root_dir))
    if not abspath.startswith(buildroot):
      raise ValueError('Source root %s is not under the build root %s' % (abspath, buildroot))
    source_root_dir = os.path.relpath(abspath, buildroot)

    types = cls._TYPES_BY_ROOT.get(source_root_dir)
    if types is None:
      types = OrderedSet()
      cls._TYPES_BY_ROOT[source_root_dir] = types

    for allowed_target_type in allowed_target_types:
      types.add(allowed_target_type)
      roots = cls._ROOTS_BY_TYPE.get(allowed_target_type)
      if roots is None:
        roots = OrderedSet()
        cls._ROOTS_BY_TYPE[allowed_target_type] = roots
      roots.add(source_root_dir)
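The get-or-create pattern above recurs throughout these examples; a self-contained illustration of the same idiom via `dict.setdefault`, with plain lists standing in for OrderedSet:

types_by_root = {}

def register(root, *target_types):
    # setdefault collapses the get/None-check/assign dance into one call.
    types = types_by_root.setdefault(root, [])
    for target_type in target_types:
        if target_type not in types:
            types.append(target_type)

register('src/java', 'java_library')
register('src/java', 'java_library', 'junit_tests')
assert types_by_root == {'src/java': ['java_library', 'junit_tests']}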
Example #5
class RootedProducts(object):
  """File products of a build that have a concept of a 'root' directory.

  E.g., classfiles, under a root package directory."""
  def __init__(self, root):
    self._root = root
    self._rel_paths = OrderedSet()

  def add_abs_paths(self, abs_paths):
    for abs_path in abs_paths:
      if not abs_path.startswith(self._root):
        raise Exception('{} is not under {}'.format(abs_path, self._root))
      self._rel_paths.add(os.path.relpath(abs_path, self._root))

  def add_rel_paths(self, rel_paths):
    self._rel_paths.update(rel_paths)

  def root(self):
    return self._root

  def rel_paths(self):
    return self._rel_paths

  def abs_paths(self):
    for relpath in self._rel_paths:
      yield os.path.join(self._root, relpath)

  def __bool__(self):
    # __bool__ must return an actual bool under Python 3.
    return bool(self._rel_paths)

  __nonzero__ = __bool__
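A hedged usage sketch for the class above (paths are illustrative; assumes the os and OrderedSet imports of the original module):

products = RootedProducts('/build/classes')
products.add_abs_paths(['/build/classes/com/foo/Bar.class'])
assert list(products.rel_paths()) == ['com/foo/Bar.class']
assert list(products.abs_paths()) == ['/build/classes/com/foo/Bar.class']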
Example #6
  def _compute_sources(self, target):
    relative_sources = OrderedSet()
    source_roots = OrderedSet()

    def capture_and_relativize_to_source_root(source):
      source_root = self.context.source_roots.find_by_path(source)
      if not source_root:
        source_root = self.context.source_roots.find(target)
      source_roots.add(source_root.path)
      return fast_relpath(source, source_root.path)

    if target.payload.get_field_value('ordered_sources'):
      # Re-match the filespecs against the sources in order to apply them in the literal order
      # they were specified in.
      filespec = target.globs_relative_to_buildroot()
      excludes = filespec.get('excludes', [])
      for glob in filespec.get('globs', []):
        sources = [s for s in target.sources_relative_to_buildroot()
                   if globs_matches([s], [glob], excludes)]
        if len(sources) != 1:
          raise TargetDefinitionException(
              target,
              'With `ordered_sources=True`, expected one match for each file literal, '
              'but got: {} for literal `{}`.'.format(sources, glob)
            )
        relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
    else:
      # Otherwise, use the default (unspecified) snapshot ordering.
      for source in target.sources_relative_to_buildroot():
        relative_sources.add(capture_and_relativize_to_source_root(source))
    return relative_sources, source_roots
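The ordered-literal matching idea in miniature: each pattern is re-matched against the full source list so that output order follows the order the patterns were declared in, not the snapshot order. Here `fnmatch` stands in for pants' `globs_matches`:

import fnmatch

snapshot = ['b.proto', 'a.proto']  # arbitrary snapshot order
literals = ['a.proto', 'b.proto']  # the order the user declared
ordered = [s for pat in literals for s in snapshot if fnmatch.fnmatch(s, pat)]
assert ordered == ['a.proto', 'b.proto']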
Example #7
  def _create_doc_target(self):
    all_sources = []
    all_deps = OrderedSet()
    for target in self.targets:
      if not self.only_provides or is_exported(target):
        for source in target.sources:
          source_path = os.path.join(self.java_src_prefix, source)
          if os.path.exists(source_path):
            all_sources.append(source_path)
          else:
            print "skipping %s" % source_path

          for jar_dep in target.jar_dependencies:
            if jar_dep.rev:
              all_deps.add(copy(jar_dep).intransitive())

    def create_meta_target():
      return JavaLibrary('pants.doc.deps',
                         all_sources,
                         provides = None,
                         dependencies = all_deps,
                         excludes = None,
                         resources = None,
                         binary_resources = None,
                         deployjar = False,
                         buildflags = None,
                         is_meta = True)

    # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
    return list(self.targets)[0].do_in_context(create_meta_target)
Example #8
class RootedProducts(object):
    """File products of a build that have a concept of a 'root' directory.

  E.g., classfiles, under a root package directory."""
    def __init__(self, root):
        self._root = root
        self._rel_paths = OrderedSet()

    def add_abs_paths(self, abs_paths):
        for abs_path in abs_paths:
            self._rel_paths.add(fast_relpath(abs_path, self._root))

    def add_rel_paths(self, rel_paths):
        self._rel_paths.update(rel_paths)

    def root(self):
        return self._root

    def rel_paths(self):
        return self._rel_paths

    def abs_paths(self):
        for relpath in self._rel_paths:
            yield os.path.join(self._root, relpath)

    def __bool__(self):
        # __bool__ must return an actual bool under Python 3.
        return bool(self._rel_paths)

    __nonzero__ = __bool__
Example #9
  def parse_addresses(self, specs, fail_fast=False):
    """Process a list of command line specs and perform expansion.  This method can expand a list
    of command line specs.
    :param list specs: either a single spec string or a list of spec strings.
    :return: a generator of specs parsed into addresses.
    :raises: CmdLineSpecParser.BadSpecError if any of the address selectors could not be parsed.
    """
    specs = maybe_list(specs)

    addresses = OrderedSet()
    for spec in specs:
      for address in self._parse_spec(spec, fail_fast):
        addresses.add(address)

    results = filter(self._not_excluded_address, addresses)

    # Print debug information about the excluded targets
    if logger.getEffectiveLevel() <= logging.DEBUG and self._exclude_patterns:
      logger.debug('excludes:\n  {excludes}'
                   .format(excludes='\n  '.join(self._exclude_target_regexps)))
      targets = ', '.join(self._excluded_target_map[CmdLineSpecParser._UNMATCHED_KEY])
      logger.debug('Targets after excludes: {targets}'.format(targets=targets))
      excluded_count = 0
      for pattern, targets in self._excluded_target_map.items():
        if pattern != CmdLineSpecParser._UNMATCHED_KEY:
          logger.debug('Targets excluded by pattern {pattern}\n  {targets}'
                       .format(pattern=pattern,
                               targets='\n  '.join(targets)))
          excluded_count += len(targets)
      logger.debug('Excluded {count} target{plural}.'
                   .format(count=excluded_count,
                           plural=('s' if excluded_count != 1 else '')))
    return results
Example #10
    def _format_args_for_target(self, target, target_workdir):
        """Calculate the arguments to pass to the command line for a single target."""

        relative_proto_files = OrderedSet()
        if target.payload.proto_files:
            relative_proto_files.update(target.payload.proto_files)
        else:
            sources = OrderedSet(target.sources_relative_to_buildroot())
            if not self.validate_sources_present(sources, [target]):
                return None
            # Compute the source path relative to the 'source root', which is the path used at
            # the root of imports.
            for source in sources:
                source_root = self.context.source_roots.find_by_path(
                    source).path
                relative_proto_files.add(os.path.relpath(source, source_root))

        args = ['--generated-source-directory', target_workdir]

        for root in target.payload.roots:
            args.extend(['--root', root])

        for path in self._calculate_proto_paths(target):
            # NB(gmalmquist): This isn't a typo. The --source argument is actually a proto path.
            args.extend(['--source', path])

        for source in relative_proto_files:
            args.extend(['--proto', source])

        return args
Example #11
  def get_resolved_jars_for_jar_library(self, jar_library, memo=None):
    """Collects jars for the passed jar_library.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the library's jar_dependencies, but will NOT
    walk into its non-jar dependencies.

    :param jar_library: A JarLibrary to collect the transitive artifacts for.
    :param memo: See `traverse_dependency_graph`.
    :returns: All the artifacts for all of the jars in this library, including transitive deps.
    :rtype: list of :class:`pants.backend.jvm.jar_dependency_utils.ResolvedJar`
    """
    def to_resolved_jar(jar_module_ref, artifact_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_module_ref.org, name=jar_module_ref.name,
                                                 rev=jar_module_ref.rev,
                                                 classifier=jar_module_ref.classifier),
                         cache_path=artifact_path
      )
    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in jar_library.jar_dependencies:
      for classifier in jar.artifact_classifiers:
        jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier)
        for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
          for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
            resolved_jars.add(to_resolved_jar(jar_module_ref, artifact_path))
    return resolved_jars
Example #12
    def _create_doc_target(self):
        all_sources = []
        all_deps = OrderedSet()
        for target in self.targets:
            if not self.only_provides or is_exported(target):
                for source in target.sources:
                    source_path = os.path.join(self.java_src_prefix, source)
                    if os.path.exists(source_path):
                        all_sources.append(source_path)
                    else:
                        print "skipping %s" % source_path

                    for jar_dep in target.jar_dependencies:
                        if jar_dep.rev:
                            all_deps.add(copy(jar_dep).intransitive())

        def create_meta_target():
            return JavaLibrary('pants.doc.deps',
                               all_sources,
                               provides=None,
                               dependencies=all_deps,
                               excludes=None,
                               resources=None,
                               binary_resources=None,
                               deployjar=False,
                               buildflags=None,
                               is_meta=True)

        # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
        return list(self.targets)[0].do_in_context(create_meta_target)
Example #13
  def get_resolved_jars_for_coordinates(self, coordinates, memo=None):
    """Collects jars for the passed coordinates.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the passed coordinates dependencies.

    :param coordinates collections.Iterable: Collection of coordinates to collect transitive
                                             resolved jars for.
    :param memo: See `traverse_dependency_graph`.
    :returns: All the artifacts for all of the jars for the provided coordinates,
              including transitive dependencies.
    :rtype: list of :class:`pants.java.jar.ResolvedJar`
    """
    def to_resolved_jar(jar_ref, jar_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org,
                                                 name=jar_ref.name,
                                                 rev=jar_ref.rev,
                                                 classifier=jar_ref.classifier,
                                                 ext=jar_ref.ext),
                         cache_path=jar_path)
    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in coordinates:
      classifier = jar.classifier if self._conf == 'default' else self._conf
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier, jar.ext)
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
          resolved_jars.add(to_resolved_jar(module_ref, artifact_path))
    return resolved_jars
Example #14
class RootedProducts(object):
    """Products of a build that have a concept of a 'root' directory.

  E.g., classfiles, under a root package directory."""
    def __init__(self, root):
        self._root = root
        self._rel_paths = OrderedSet()

    def add_abs_paths(self, abs_paths):
        for abs_path in abs_paths:
            if not abs_path.startswith(self._root):
                raise Exception('%s is not under %s' % (abs_path, self._root))
            self._rel_paths.add(os.path.relpath(abs_path, self._root))

    def add_rel_paths(self, rel_paths):
        self._rel_paths.update(rel_paths)

    def root(self):
        return self._root

    def rel_paths(self):
        return self._rel_paths

    def abs_paths(self):
        for relpath in self._rel_paths:
            yield os.path.join(self._root, relpath)
Example #15
  def bundle(self, app):
    """Create a self-contained application bundle.

    The bundle will contain the target classes, dependencies and resources.
    """
    assert(isinstance(app, BundleCreate.App))

    def verbose_symlink(src, dst):
      try:
        os.symlink(src, dst)
      except OSError as e:
        self.context.log.error("Unable to create symlink: {0} -> {1}".format(src, dst))
        raise e

    bundle_dir = os.path.join(self._outdir, '%s-bundle' % app.basename)
    self.context.log.info('creating %s' % os.path.relpath(bundle_dir, get_buildroot()))

    safe_mkdir(bundle_dir, clean=True)

    classpath = OrderedSet()
    # If creating a deployjar, we add the external dependencies to the bundle as
    # loose classes, and have no classpath. Otherwise we add the external dependencies
    # to the bundle as jars in a libs directory.
    if not self._create_deployjar:
      lib_dir = os.path.join(bundle_dir, 'libs')
      os.mkdir(lib_dir)

      jarmap = self.context.products.get('jars')

      def add_jars(target):
        generated = jarmap.get(target)
        if generated:
          for base_dir, internal_jars in generated.items():
            for internal_jar in internal_jars:
              verbose_symlink(os.path.join(base_dir, internal_jar), os.path.join(lib_dir, internal_jar))
              classpath.add(internal_jar)

      app.binary.walk(add_jars, lambda t: t != app.binary)

      # Add external dependencies to the bundle.
      for basedir, external_jar in self.list_external_jar_dependencies(app.binary):
        path = os.path.join(basedir, external_jar)
        verbose_symlink(path, os.path.join(lib_dir, external_jar))
        classpath.add(external_jar)

    bundle_jar = os.path.join(bundle_dir, '%s.jar' % app.binary.basename)

    with self.monolithic_jar(app.binary, bundle_jar,
                             with_external_deps=self._create_deployjar) as jar:
      self.add_main_manifest_entry(jar, app.binary)
      if classpath:
        jar.classpath([os.path.join('libs', jar) for jar in classpath])

    for bundle in app.bundles:
      for path, relpath in bundle.filemap.items():
        bundle_path = os.path.join(bundle_dir, relpath)
        safe_mkdir(os.path.dirname(bundle_path))
        verbose_symlink(path, bundle_path)

    return bundle_dir
Example #16
 def get_jars_for_ivy_module(self, jar):
   ref = IvyModuleRef(jar.org, jar.name, jar.rev)
   deps = OrderedSet()
   for dep in self.deps_by_caller.get(ref, []):
     deps.add(dep)
     deps.update(self.get_jars_for_ivy_module(dep))
   return deps
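The same transitive-expansion shape over a plain dict, with an explicit `seen` guard added for safety (the original relies on the resolved Ivy graph being acyclic); a sketch, not the pants API:

def transitive_deps(root, deps_by_ref, seen=None):
    # Depth-first expansion of root's dependencies; `seen` prevents infinite
    # recursion should the graph contain a cycle.
    seen = {root} if seen is None else seen
    out = []
    for dep in deps_by_ref.get(root, []):
        if dep in seen:
            continue
        seen.add(dep)
        out.append(dep)
        out.extend(transitive_deps(dep, deps_by_ref, seen))
    return out

assert transitive_deps('a', {'a': ['b'], 'b': ['c'], 'c': ['a']}) == ['b', 'c']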
Example #17
async def transitive_hydrated_targets(
        build_file_addresses: BuildFileAddresses) -> TransitiveHydratedTargets:
    """Given BuildFileAddresses, kicks off recursion on expansion of TransitiveHydratedTargets.

  The TransitiveHydratedTarget struct represents a structure-shared graph, which we walk
  and flatten here. The engine memoizes the computation of TransitiveHydratedTarget, so
  when multiple TransitiveHydratedTargets objects are being constructed for multiple
  roots, their structure will be shared.
  """

    transitive_hydrated_targets = await MultiGet(
        Get[TransitiveHydratedTarget](Address, a)
        for a in build_file_addresses.addresses)

    closure = OrderedSet()
    to_visit = deque(transitive_hydrated_targets)

    while to_visit:
        tht = to_visit.popleft()
        if tht.root in closure:
            continue
        closure.add(tht.root)
        to_visit.extend(tht.dependencies)

    return TransitiveHydratedTargets(
        tuple(tht.root for tht in transitive_hydrated_targets), closure)
Example #18
def dump_requirements(builder, interpreter, reqs, log, platforms=None):
    """Multi-platform dependency resolution for PEX files.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param reqs: A list of :class:`PythonRequirement` to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
                    Defaults to the platforms specified by PythonSetup.
  """
    deduped_reqs = OrderedSet(reqs)
    find_links = OrderedSet()
    blacklist = PythonSetup.global_instance().resolver_blacklist
    for req in deduped_reqs:
        log.debug('  Dumping requirement: {}'.format(req))
        if not (req.key in blacklist
                and interpreter.identity.matches(blacklist[req.key])):
            builder.add_requirement(req.requirement)
        if req.repository:
            find_links.add(req.repository)

    # Resolve the requirements into distributions.
    distributions = _resolve_multi(interpreter, deduped_reqs, platforms,
                                   find_links)
    locations = set()
    for platform, dists in distributions.items():
        for dist in dists:
            if dist.location not in locations:
                log.debug('  Dumping distribution: .../{}'.format(
                    os.path.basename(dist.location)))
                builder.add_distribution(dist)
            locations.add(dist.location)
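The dedup-by-location loop in isolation (toy data, not the pex builder API): each distribution is dumped once even when several platforms resolve to the same file.

resolved = {'linux': ['/cache/a.whl', '/cache/b.whl'], 'macos': ['/cache/a.whl']}
locations, dumped = set(), []
for platform, dists in resolved.items():
    for dist in dists:
        if dist not in locations:
            dumped.append(dist)  # Stands in for builder.add_distribution.
        locations.add(dist)
assert sorted(dumped) == ['/cache/a.whl', '/cache/b.whl']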
Example #19
class StepContext(object):
    """Encapsulates external state and the details of creating Nodes.

  This avoids giving Nodes direct access to the task list or subject set.
  """
    def __init__(self, node_builder, project_tree, node_states, inline_nodes):
        self._node_builder = node_builder
        self.project_tree = project_tree
        self._node_states = dict(node_states)
        self._parents = OrderedSet()
        self._inline_nodes = inline_nodes

    def get(self, node):
        """Given a Node and computed node_states, gets the current state for the Node.

    Optionally inlines execution of inlineable dependencies if `inline_nodes=True`.
    """
        state = self._node_states.get(node, None)
        if state is not None:
            return state
        if self._inline_nodes and node.is_inlineable:
            if node in self._parents:
                return Noop.cycle(list(self._parents)[-1], node)
            self._parents.add(node)
            state = self._node_states[node] = node.step(self)
            self._parents.remove(node)
            return state
        else:
            return Waiting([node])

    def gen_nodes(self, subject, product, variants):
        """Yields Node instances which might be able to provide a value for the given inputs."""
        return self._node_builder.gen_nodes(subject, product, variants)

    def select_node(self, selector, subject, variants):
        """Constructs a Node for the given Selector and the given Subject/Variants.

    This method is decoupled from Selector classes in order to allow the `selector` package to not
    need a dependency on the `nodes` package.
    """
        selector_type = type(selector)
        if selector_type is Select:
            return SelectNode(subject, selector.product, variants, None)
        elif selector_type is SelectVariant:
            return SelectNode(subject, selector.product, variants,
                              selector.variant_key)
        elif selector_type is SelectDependencies:
            return DependenciesNode(subject, selector.product, variants,
                                    selector.deps_product, selector.field)
        elif selector_type is SelectProjection:
            return ProjectionNode(subject, selector.product, variants,
                                  selector.projected_subject, selector.fields,
                                  selector.input_product)
        elif selector_type is SelectLiteral:
            # NB: Intentionally ignores subject parameter to provide a literal subject.
            return SelectNode(selector.subject, selector.product, variants,
                              None)
        else:
            raise ValueError('Unrecognized Selector type "{}" for: {}'.format(
                selector_type, selector))
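The cycle-guard idiom from `get`, in isolation: track the active chain of inlined nodes and bail out when a node re-enters it (a toy evaluator, not the engine API):

def evaluate(node, children, active=()):
    # `active` plays the role of self._parents above.
    if node in active:
        return 'cycle at {}'.format(node)
    return [evaluate(c, children, active + (node,)) for c in children.get(node, [])]

assert evaluate('a', {'a': ['b'], 'b': ['a']}) == [['cycle at a']]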
Example #20
def dump_requirements(builder, interpreter, reqs, log, platforms=None):
  """Multi-platform dependency resolution for PEX files.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param reqs: A list of :class:`PythonRequirement` to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
                    Defaults to the platforms specified by PythonSetup.
  """
  deduped_reqs = OrderedSet(reqs)
  find_links = OrderedSet()
  blacklist = PythonSetup.global_instance().resolver_blacklist
  for req in deduped_reqs:
    log.debug('  Dumping requirement: {}'.format(req))
    if not (req.key in blacklist and interpreter.identity.matches(blacklist[req.key])):
      builder.add_requirement(req.requirement)
    if req.repository:
      find_links.add(req.repository)

  # Resolve the requirements into distributions.
  distributions = _resolve_multi(interpreter, deduped_reqs, platforms, find_links)
  locations = set()
  for platform, dists in distributions.items():
    for dist in dists:
      if dist.location not in locations:
        log.debug('  Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
        builder.add_distribution(dist)
      locations.add(dist.location)
Example #21
    def _compute_sources(self, target):
        relative_sources = OrderedSet()
        source_roots = OrderedSet()

        def capture_and_relativize_to_source_root(source):
            source_root = self.context.source_roots.find_by_path(source)
            if not source_root:
                source_root = self.context.source_roots.find(target)
            source_roots.add(source_root.path)
            return fast_relpath(source, source_root.path)

        if target.payload.get_field_value('ordered_sources'):
            # Re-match the filespecs against the sources in order to apply them in the literal order
            # they were specified in.
            filespec = target.globs_relative_to_buildroot()
            excludes = filespec.get('excludes', [])
            for glob in filespec.get('globs', []):
                sources = [
                    s for s in target.sources_relative_to_buildroot()
                    if globs_matches([s], [glob], excludes)
                ]
                if len(sources) != 1:
                    raise TargetDefinitionException(
                        target,
                        'With `ordered_sources=True`, expected one match for each file literal, '
                        'but got: {} for literal `{}`.'.format(
                            sources, glob))
                relative_sources.add(
                    capture_and_relativize_to_source_root(sources[0]))
        else:
            # Otherwise, use the default (unspecified) snapshot ordering.
            for source in target.sources_relative_to_buildroot():
                relative_sources.add(
                    capture_and_relativize_to_source_root(source))
        return relative_sources, source_roots
Example #22
    def targets(self, predicate=None, **kwargs):
        """Selects targets in-play in this run from the target roots and their transitive dependencies.

    Also includes any new synthetic targets created from the target roots or their transitive
    dependencies during the course of the run.

    See Target.closure_for_targets for remaining parameters.

    :API: public

    :param predicate: If specified, the predicate will be used to narrow the scope of targets
                      returned.
    :param bool postorder: `True` to gather transitive dependencies with a postorder traversal;
                          `False` (preorder) by default.
    :returns: A list of matching targets.
    """
        target_set = self._collect_targets(self.target_roots, **kwargs)

        synthetics = OrderedSet()
        for synthetic_address in self.build_graph.synthetic_addresses:
            if self.build_graph.get_concrete_derived_from(
                    synthetic_address) in target_set:
                synthetics.add(self.build_graph.get_target(synthetic_address))
        target_set.update(self._collect_targets(synthetics, **kwargs))

        return list(filter(predicate, target_set))
Example #23
    def _calculate_proto_paths(self, target):
        """Computes the set of paths that wire uses to lookup imported protos.

    The protos under these paths are not compiled, but they are required to compile the protos
    that import them.
    :param target: the JavaWireLibrary target to compile.
    :return: an ordered set of directories to pass along to wire.
    """
        proto_paths = OrderedSet()
        proto_paths.add(
            os.path.join(get_buildroot(),
                         self.context.source_roots.find(target).path))

        def collect_proto_paths(dep):
            if not dep.has_sources():
                return
            for source in dep.sources_relative_to_buildroot():
                if source.endswith('.proto'):
                    root = self.context.source_roots.find_by_path(source)
                    if root:
                        proto_paths.add(
                            os.path.join(get_buildroot(), root.path))

        collect_proto_paths(target)
        target.walk(collect_proto_paths)
        return proto_paths
Example #24
    def _register(cls, source_root_dir, mutable, *allowed_target_types):
        """Registers a source root.

    :param string source_root_dir: The source root directory against which we resolve source paths,
                     relative to the build root.
    :param list allowed_target_types: Optional list of target types. If specified, we enforce that
                          only targets of those types appear under this source root.
    """
        # Temporary delegation to the new implementation, until this entire file goes away.
        SourceRootConfig.global_instance().get_source_roots().add_source_root(
            source_root_dir)

        source_root_dir = SourceRoot._relative_to_buildroot(source_root_dir)

        types = cls._TYPES_BY_ROOT.get(source_root_dir)
        if types is None:
            types = OrderedSet()
            cls._TYPES_BY_ROOT[source_root_dir] = types

        for allowed_target_type in allowed_target_types:
            types.add(allowed_target_type)
            roots = cls._ROOTS_BY_TYPE.get(allowed_target_type)
            if roots is None:
                roots = OrderedSet()
                cls._ROOTS_BY_TYPE[allowed_target_type] = roots
            roots.add(source_root_dir)

        cls._SOURCE_ROOT_TREE.add_root(source_root_dir, allowed_target_types,
                                       mutable)
Example #25
  def parse_addresses(self, specs, fail_fast=False):
    """Process a list of command line specs and perform expansion.  This method can expand a list
    of command line specs.
    :param list specs: either a single spec string or a list of spec strings.
    :return: a generator of specs parsed into addresses.
    :raises: CmdLineSpecParser.BadSpecError if any of the address selectors could not be parsed.
    """
    specs = maybe_list(specs)

    addresses = OrderedSet()
    for spec in specs:
      for address in self._parse_spec(spec, fail_fast):
        addresses.add(address)

    results = filter(self._not_excluded_address, addresses)

    # Print debug information about the excluded targets
    if logger.getEffectiveLevel() <= logging.DEBUG and self._exclude_patterns:
      logger.debug('excludes:\n  {excludes}'
                   .format(excludes='\n  '.join(self._exclude_target_regexps)))
      targets = ', '.join(self._excluded_target_map[CmdLineSpecParser._UNMATCHED_KEY])
      logger.debug('Targets after excludes: {targets}'.format(targets=targets))
      excluded_count = 0
      for pattern, targets in six.iteritems(self._excluded_target_map):
        if pattern != CmdLineSpecParser._UNMATCHED_KEY:
          logger.debug('Targets excluded by pattern {pattern}\n  {targets}'
                       .format(pattern=pattern,
                               targets='\n  '.join(targets)))
          excluded_count += len(targets)
      logger.debug('Excluded {count} target{plural}.'
                   .format(count=excluded_count,
                           plural=('s' if excluded_count != 1 else '')))
    return results
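The exclude-pattern filtering above, reduced to its core (patterns and addresses are hypothetical):

import re

exclude_patterns = [re.compile(r'^tests/')]
addresses = ['src/java:lib', 'tests/java:lib-test']
kept = [a for a in addresses if not any(p.search(a) for p in exclude_patterns)]
assert kept == ['src/java:lib']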
Example #26
    def execute(self, **kwargs):
        # NB: kwargs are for testing and pass-through to underlying subprocess process spawning.

        go_targets = OrderedSet(target for target in self.context.target_roots
                                if self.is_go(target))
        args = self.get_passthru_args()
        if not go_targets or not args:
            msg = (yellow(
                'The pants `{goal}` goal expects at least one go target and at least one '
                'pass-through argument to be specified, call with:\n') +
                   green('  ./pants {goal} {targets} -- {args}').format(
                       goal=self.options_scope,
                       targets=(green(' '.join(t.address.reference()
                                               for t in go_targets)) if
                                go_targets else red('[missing go targets]')),
                       args=green(' '.join(args))
                       if args else red('[missing pass-through args]')))
            raise self.MissingArgsError(msg)

        go_path = OrderedSet()
        import_paths = OrderedSet()
        for target in go_targets:
            self.ensure_workspace(target)
            go_path.add(self.get_gopath(target))
            import_paths.add(target.import_path)

        self.execute_with_go_env(os.pathsep.join(go_path), list(import_paths),
                                 args, **kwargs)
Example #27
  def get_resolved_jars_for_jar_library(self, jar_library, memo=None):
    """Collects jars for the passed jar_library.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the library's jar_dependencies, but will NOT
    walk into its non-jar dependencies.

    :param jar_library: A JarLibrary to collect the transitive artifacts for.
    :param memo: See `traverse_dependency_graph`.
    :returns: All the artifacts for all of the jars in this library, including transitive deps.
    :rtype: list of :class:`pants.backend.jvm.jar_dependency_utils.ResolvedJar`
    """
    def to_resolved_jar(jar_ref, jar_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org,
                                                 name=jar_ref.name,
                                                 rev=jar_ref.rev,
                                                 classifier=jar_ref.classifier,
                                                 ext=jar_ref.ext),
                         cache_path=jar_path)
    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in jar_library.jar_dependencies:
      classifier = jar.classifier if self._conf == 'default' else self._conf
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier)
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
          resolved_jars.add(to_resolved_jar(module_ref, artifact_path))
    return resolved_jars
Example #28
 def _create_java_target(self, target, dependees):
   genfiles = []
   for source in target.sources_relative_to_source_root():
     path = os.path.join(target.target_base, source)
     genfiles.extend(self.calculate_genfiles(path, source).get('java', []))
   spec_path = os.path.relpath(self.java_out, get_buildroot())
   address = SyntheticAddress(spec_path, target.id)
   deps = OrderedSet(self.javadeps)
   import_jars = target.imported_jars
   jars_tgt = self.context.add_new_target(SyntheticAddress(spec_path, target.id+str('-rjars')),
                                          JarLibrary,
                                          jars=import_jars,
                                          derived_from=target)
   # Add in the 'spec-rjars' target, which contains all the JarDependency targets passed in via the
   # imports parameter. Each of these jars is expected to contain .proto files bundled together
   # with their .class files.
   deps.add(jars_tgt)
   tgt = self.context.add_new_target(address,
                                     JavaLibrary,
                                     derived_from=target,
                                     sources=genfiles,
                                     provides=target.provides,
                                     dependencies=deps,
                                     excludes=target.payload.get_field_value('excludes'))
   for dependee in dependees:
     dependee.inject_dependency(tgt.address)
   return tgt
Example #29
  def get_resolved_jars_for_coordinates(self, coordinates, memo=None):
    """Collects jars for the passed coordinates.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the passed coordinates dependencies.

    :param coordinates collections.Iterable: Collection of coordinates to collect transitive
                                             resolved jars for.
    :param memo: See `traverse_dependency_graph`.
    :returns: All the artifacts for all of the jars for the provided coordinates,
              including transitive dependencies.
    :rtype: list of :class:`pants.backend.jvm.jar_dependency_utils.ResolvedJar`
    """
    def to_resolved_jar(jar_ref, jar_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org,
                                                 name=jar_ref.name,
                                                 rev=jar_ref.rev,
                                                 classifier=jar_ref.classifier,
                                                 ext=jar_ref.ext),
                         cache_path=jar_path)
    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in coordinates:
      classifier = jar.classifier if self._conf == 'default' else self._conf
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier)
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
          resolved_jars.add(to_resolved_jar(module_ref, artifact_path))
    return resolved_jars
Example #30
    def parse_args(args):
        goals = OrderedSet()
        specs = OrderedSet()
        help = False
        explicit_multi = False

        def is_spec(spec):
            return os.sep in spec or ':' in spec

        for i, arg in enumerate(args):
            help = help or 'help' == arg
            if not arg.startswith('-'):
                specs.add(arg) if is_spec(arg) else goals.add(arg)
            elif '--' == arg:
                if specs:
                    raise GoalError(
                        'Cannot intermix targets with goals when using --. Targets should '
                        'appear on the right')
                explicit_multi = True
                del args[i]
                break

        if explicit_multi:
            spec_offset = len(goals) + 1 if help else len(goals)
            specs.update(arg for arg in args[spec_offset:]
                         if not arg.startswith('-'))

        return goals, specs
Example #31
 def get_jars_for_ivy_module(self, jar):
   ref = IvyModuleRef(jar.org, jar.name, jar.rev)
   deps = OrderedSet()
   for dep in self.deps_by_caller.get(ref, []):
     deps.add(dep)
     deps.update(self.get_jars_for_ivy_module(dep))
   return deps
Example #32
 def _create_java_target(self, target, dependees):
   genfiles = []
   for source in target.sources_relative_to_source_root():
     path = os.path.join(target.target_base, source)
     genfiles.extend(calculate_genfiles(path, source).get('java', []))
   spec_path = os.path.relpath(self.java_out, get_buildroot())
   address = SyntheticAddress(spec_path, target.id)
   deps = OrderedSet(self.javadeps)
   import_jars = target.imports
   jars_tgt = self.context.add_new_target(SyntheticAddress(spec_path, target.id+str('-rjars')),
                                          JarLibrary,
                                          jars=import_jars,
                                          derived_from=target)
   # Add in the 'spec-rjars' target, which contains all the JarDependency targets passed in via the
   # imports parameter. Each of these jars is expected to contain .proto files bundled together
   # with their .class files.
   deps.add(jars_tgt)
   tgt = self.context.add_new_target(address,
                                     JavaLibrary,
                                     derived_from=target,
                                     sources=genfiles,
                                     provides=target.provides,
                                     dependencies=deps,
                                     excludes=target.payload.get_field_value('excludes'))
   for dependee in dependees:
     dependee.inject_dependency(tgt.address)
   return tgt
Example #33
  def _register(cls, source_root_dir, mutable, *allowed_target_types):
    """Registers a source root.

    :param string source_root_dir: The source root directory against which we resolve source paths,
                     relative to the build root.
    :param list allowed_target_types: Optional list of target types. If specified, we enforce that
                          only targets of those types appear under this source root.
    """
    # Temporary delegation to the new implementation, until this entire file goes away.
    SourceRootConfig.global_instance().get_source_roots().add_source_root(source_root_dir)

    source_root_dir = SourceRoot._relative_to_buildroot(source_root_dir)

    types = cls._TYPES_BY_ROOT.get(source_root_dir)
    if types is None:
      types = OrderedSet()
      cls._TYPES_BY_ROOT[source_root_dir] = types

    for allowed_target_type in allowed_target_types:
      types.add(allowed_target_type)
      roots = cls._ROOTS_BY_TYPE.get(allowed_target_type)
      if roots is None:
        roots = OrderedSet()
        cls._ROOTS_BY_TYPE[allowed_target_type] = roots
      roots.add(source_root_dir)

    cls._SOURCE_ROOT_TREE.add_root(source_root_dir, allowed_target_types, mutable)
Example #34
    def _register(cls, source_root_dir, *allowed_target_types):
        """Registers a source root.

    :param string source_root_dir: The source root directory against which we resolve source paths,
                     relative to the build root.
    :param list allowed_target_types: Optional list of target types. If specified, we enforce that
                          only targets of those types appear under this source root.
    """
        # Verify that source_root_dir doesn't reach outside buildroot.
        buildroot = os.path.normpath(get_buildroot())
        if source_root_dir.startswith(buildroot):
            abspath = os.path.normpath(source_root_dir)
        else:
            abspath = os.path.normpath(os.path.join(buildroot,
                                                    source_root_dir))
        if not abspath.startswith(buildroot):
            raise ValueError('Source root %s is not under the build root %s' %
                             (abspath, buildroot))
        source_root_dir = os.path.relpath(abspath, buildroot)

        types = cls._TYPES_BY_ROOT.get(source_root_dir)
        if types is None:
            types = OrderedSet()
            cls._TYPES_BY_ROOT[source_root_dir] = types

        for allowed_target_type in allowed_target_types:
            types.add(allowed_target_type)
            roots = cls._ROOTS_BY_TYPE.get(allowed_target_type)
            if roots is None:
                roots = OrderedSet()
                cls._ROOTS_BY_TYPE[allowed_target_type] = roots
            roots.add(source_root_dir)

        cls._SOURCE_ROOT_TREE.add_root(source_root_dir, allowed_target_types)
Example #35
  def _resolve_overrides(self):
    """
    Resolves override targets, and then excludes and re-includes each of them
    to create and return a new dependency set.
    """
    if not self.override_targets:
      return self._pre_override_dependencies

    result = OrderedSet()

    # resolve overrides and fetch all of their "artifact-providing" dependencies
    excludes = set()
    for override_target in self.override_targets:
      # add pre_override deps of the target as exclusions
      for resolved in override_target.resolve():
        excludes.update(self._excludes(resolved))
      # prepend the target as a new target
      result.add(override_target)

    # add excludes for each artifact
    for direct_dep in self._pre_override_dependencies:
      # add relevant excludes to jar dependencies
      for jar_dep in self._jar_dependencies(direct_dep):
        for exclude in excludes:
          jar_dep.exclude(exclude.org, exclude.name)
      result.add(direct_dep)

    return result
Example #36
  def targets(self, predicate=None, postorder=False):
    """Selects targets in-play in this run from the target roots and their transitive dependencies.

    Also includes any new synthetic targets created from the target roots or their transitive
    dependencies during the course of the run.

    :API: public

    :param predicate: If specified, the predicate will be used to narrow the scope of targets
                      returned.
    :param bool postorder: `True` to gather transitive dependencies with a postorder traversal;
                          `False` (preorder) by default.
    :returns: A list of matching targets.
    """
    target_set = self._collect_targets(self.target_roots, postorder=postorder)

    synthetics = OrderedSet()
    for synthetic_address in self.build_graph.synthetic_addresses:
      if self.build_graph.get_concrete_derived_from(synthetic_address) in target_set:
        synthetics.add(self.build_graph.get_target(synthetic_address))

    synthetic_set = self._collect_targets(synthetics, postorder=postorder)

    target_set.update(synthetic_set)

    return list(filter(predicate, target_set))
Example #37
    def bundle(self, app):
        """Create a self-contained application bundle.

    The bundle will contain the target classes, dependencies and resources.
    """
        assert (isinstance(app, BundleCreate.App))

        bundle_dir = os.path.join(self._outdir, '%s-bundle' % app.basename)
        self.context.log.info('creating %s' %
                              os.path.relpath(bundle_dir, get_buildroot()))

        safe_mkdir(bundle_dir, clean=True)

        classpath = OrderedSet()
        # If creating a deployjar, we add the external dependencies to the bundle as
        # loose classes, and have no classpath. Otherwise we add the external dependencies
        # to the bundle as jars in a libs directory.
        if not self._create_deployjar:
            lib_dir = os.path.join(bundle_dir, 'libs')
            os.mkdir(lib_dir)

            jarmap = self.context.products.get('jars')

            def add_jars(target):
                generated = jarmap.get(target)
                if generated:
                    for base_dir, internal_jars in generated.items():
                        for internal_jar in internal_jars:
                            os.symlink(os.path.join(base_dir, internal_jar),
                                       os.path.join(lib_dir, internal_jar))
                            classpath.add(internal_jar)

            app.binary.walk(add_jars, lambda t: t != app.binary)

            # Add external dependencies to the bundle.
            for basedir, external_jar in self.list_jar_dependencies(
                    app.binary):
                path = os.path.join(basedir, external_jar)
                os.symlink(path, os.path.join(lib_dir, external_jar))
                classpath.add(external_jar)

        bundle_jar = os.path.join(bundle_dir, '%s.jar' % app.binary.basename)

        with self.monolithic_jar(
                app.binary, bundle_jar,
                with_external_deps=self._create_deployjar) as jar:
            manifest = self.create_main_manifest(app.binary)
            if classpath:
                manifest.addentry(
                    Manifest.CLASS_PATH,
                    ' '.join(os.path.join('libs', jar) for jar in classpath))
            jar.writestr(Manifest.PATH, manifest.contents())

        for bundle in app.bundles:
            for path, relpath in bundle.filemap.items():
                bundle_path = os.path.join(bundle_dir, relpath)
                safe_mkdir(os.path.dirname(bundle_path))
                os.symlink(path, bundle_path)

        return bundle_dir
Example #38
  def _format_args_for_target(self, target, target_workdir):
    """Calculate the arguments to pass to the command line for a single target."""

    relative_proto_files = OrderedSet()
    if target.payload.proto_files:
      relative_proto_files.update(target.payload.proto_files)
    else:
      sources = OrderedSet(target.sources_relative_to_buildroot())
      if not self.validate_sources_present(sources, [target]):
        return None
      # Compute the source path relative to the 'source root', which is the path used at the
      # root of imports.
      for source in sources:
        source_root = self.context.source_roots.find_by_path(source).path
        relative_proto_files.add(os.path.relpath(source, source_root))

    args = ['--generated-source-directory', target_workdir]

    for root in target.payload.roots:
      args.extend(['--root', root])

    for path in self._calculate_proto_paths(target):
      # NB(gmalmquist): This isn't a typo. The --source argument is actually a proto path.
      args.extend(['--source', path])

    for source in relative_proto_files:
      args.extend(['--proto', source])

    return args
Example #39
    def execute_codegen(self, targets):
        # Invoke the generator once per target. Because the wire compiler has flags that try to
        # reduce the amount of code emitted, invoking targets together will break if one target
        # specifies a service_writer and another does not, or if one specifies roots and another
        # does not.
        for target in targets:
            sources_by_base = self._calculate_sources([target])
            if self.codegen_strategy.name() == 'isolated':
                sources = OrderedSet(target.sources_relative_to_buildroot())
            else:
                sources = OrderedSet(
                    itertools.chain.from_iterable(sources_by_base.values()))
            if not self.validate_sources_present(sources, [target]):
                continue
            relative_sources = OrderedSet()
            for source in sources:
                source_root = SourceRoot.find_by_path(source)
                if not source_root:
                    source_root = SourceRoot.find(target)
                relative_source = os.path.relpath(source, source_root)
                relative_sources.add(relative_source)
            check_duplicate_conflicting_protos(self, sources_by_base,
                                               relative_sources,
                                               self.context.log)

            args = ['--java_out={0}'.format(self.codegen_workdir(target))]

            # Add all params in payload to args

            if target.payload.get_field_value('no_options'):
                args.append('--no_options')

            service_writer = target.payload.service_writer
            if service_writer:
                args.append('--service_writer={0}'.format(service_writer))

            registry_class = target.payload.registry_class
            if registry_class:
                args.append('--registry_class={0}'.format(registry_class))

            if target.payload.roots:
                args.append('--roots={0}'.format(','.join(
                    target.payload.roots)))

            if target.payload.enum_options:
                args.append('--enum_options={0}'.format(','.join(
                    target.payload.enum_options)))

            args.append('--proto_path={0}'.format(
                os.path.join(get_buildroot(), SourceRoot.find(target))))

            args.extend(relative_sources)

            result = util.execute_java(
                classpath=self.tool_classpath('wire-compiler'),
                main='com.squareup.wire.WireCompiler',
                args=args)
            if result != 0:
                raise TaskError(
                    'Wire compiler exited non-zero ({0})'.format(result))
Example #40
 def finalize_plans(self, plans):
   subjects = set()
   jars = OrderedSet()
   for plan in plans:
     subjects.update(plan.subjects)
     jars.add(plan.jar)
   global_plan = Plan(func_or_task_type=IvyResolve, subjects=subjects, jars=list(jars))
   return [global_plan]
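The merge shape in isolation: per-target plans collapse into one global plan whose subjects are the union and whose jars are deduped in order (toy tuples, not the pants Plan API):

plans = [({'a', 'b'}, 'guava.jar'), ({'b', 'c'}, 'junit.jar'), ({'d'}, 'guava.jar')]
subjects, jars = set(), []
for plan_subjects, jar in plans:
    subjects.update(plan_subjects)
    if jar not in jars:
        jars.append(jar)
assert subjects == {'a', 'b', 'c', 'd'} and jars == ['guava.jar', 'junit.jar']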
Example #41
    def attempt(context, phases, timer=None):
        """
      Attempts to reach the goals for the supplied phases, optionally recording phase timings and
      then logging them when all specified phases have completed.
    """
        executed = OrderedDict()

        # I'd rather do this in a finally block below, but some goals os.fork, and each fork
        # causes the finally to run, printing goal timings multiple times instead of once at
        # the end.
        def emit_timings():
            if timer:
                for phase, timings in executed.items():
                    for goal, times in timings.items():
                        timer.log('%s:%s' % (phase, goal), times)

        try:
            # Prepare tasks roots to leaves and allow for goals introducing new goals in existing phases.
            tasks_by_goal = {}
            expanded = OrderedSet()
            prepared = set()
            round = 0
            while True:
                goals = list(Phase.execution_order(phases))
                if set(goals) == prepared:
                    break
                else:
                    round += 1
                    context.log.debug('Preparing goals in round %d' % round)
                    for goal in reversed(goals):
                        if goal not in prepared:
                            phase = Phase.of(goal)
                            expanded.add(phase)
                            context.log.debug('preparing: %s:%s' %
                                              (phase, goal.name))
                            prepared.add(goal)
                            task = goal.prepare(context)
                            tasks_by_goal[goal] = task

            # Execute phases leaves to roots
            context.log.debug('Executing goals in phases %s' %
                              ' -> '.join(map(str, reversed(expanded))))
            for phase in phases:
                Group.execute(phase,
                              tasks_by_goal,
                              context,
                              executed,
                              timer=timer)

            emit_timings()
            return 0
        except (TaskError, GoalError) as e:
            message = '%s' % e
            if message:
                print('\nFAILURE: %s\n' % e)
            else:
                print('\nFAILURE\n')
            emit_timings()
            return 1
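A minimal standalone sketch of the os.fork problem the comment above describes (not pants code): a finally block inside the forked region runs once per process, so anything it prints appears more than once.

import os
import sys

pid = None
try:
    pid = os.fork()
finally:
    # Runs in BOTH the parent and the child, so this line prints twice.
    # That is why attempt() calls emit_timings() explicitly on each exit
    # path instead of relying on a finally block.
    sys.stderr.write('finally ran in pid %d\n' % os.getpid())
if pid == 0:
    os._exit(0)  # child exits quietly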
Example #42
 def install_requires(cls, reduced_dependencies):
   install_requires = OrderedSet()
   for dep in reduced_dependencies:
     if cls.is_requirements(dep):
       for req in dep.payload.requirements:
         install_requires.add(str(req.requirement))
     elif cls.has_provides(dep):
       install_requires.add(dep.provides.key)
   return install_requires
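To make the two branches concrete, a hedged walk over one requirements target and one library with provides; all names below are invented.

# Hypothetical inputs:
#   dep1 -- a requirements target pinning 'ansicolors==1.0.2' and 'six>=1.9'
#   dep2 -- a library whose `provides` key is 'mylib'
# install_requires(cls, [dep1, dep2]) then yields, preserving order:
#   ['ansicolors==1.0.2', 'six>=1.9', 'mylib']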
Example #44
  def setup_parser(self, parser, args):
    if not args:
      args.append('help')

    logger = logging.getLogger(__name__)

    goals = self.new_options.goals
    specs = self.new_options.target_specs
    fail_fast = self.new_options.for_global_scope().fail_fast

    for goal in goals:
      if BuildFile.from_cache(get_buildroot(), goal, must_exist=False).exists():
        logger.warning(" Command-line argument '{0}' is ambiguous and was assumed to be "
                       "a goal. If this is incorrect, disambiguate it with ./{0}.".format(goal))

    if self.new_options.is_help:
      self.new_options.print_help(goals=goals)
      sys.exit(0)

    self.requested_goals = goals

    with self.run_tracker.new_workunit(name='setup', labels=[WorkUnit.SETUP]):
      spec_parser = CmdLineSpecParser(self.root_dir, self.address_mapper,
                                      spec_excludes=self.get_spec_excludes())
      with self.run_tracker.new_workunit(name='parse', labels=[WorkUnit.SETUP]):
        for spec in specs:
          for address in spec_parser.parse_addresses(spec, fail_fast):
            self.build_graph.inject_address_closure(address)
            self.targets.append(self.build_graph.get_target(address))
    self.goals = [Goal.by_name(goal) for goal in goals]

    rcfiles = self.config.getdefault('rcfiles', type=list,
                                     default=['/etc/pantsrc', '~/.pants.rc'])
    if rcfiles:
      rcfile = RcFile(rcfiles, default_prepend=False, process_default=True)

      # Break down the goals specified on the command line to the full set that will be run so we
      # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
      # register the task class hierarchy's fully qualified names so we can apply defaults to
      # base classes.

      sections = OrderedSet()
      for goal in Engine.execution_order(self.goals):
        for task_name in goal.ordered_task_names():
          sections.add(task_name)
          task_type = goal.task_type_by_name(task_name)
          for clazz in task_type.mro():
            if clazz == Task:
              break
            sections.add('%s.%s' % (clazz.__module__, clazz.__name__))

      augmented_args = rcfile.apply_defaults(sections, args)
      if augmented_args != args:
        # TODO(John Sirois): Cleanup this currently important mutation of the passed in args
        # once the 2-layer of command -> goal is squashed into one.
        args[:] = augmented_args
        sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" % ' '.join(augmented_args))
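The mro() walk above turns every Task subclass in a task's ancestry into a config section name. A self-contained sketch with hypothetical classes shows the effect:

class Task(object): pass
class JvmTask(Task): pass            # hypothetical intermediate base class
class JavaCompile(JvmTask): pass     # hypothetical concrete task

sections = []
for clazz in JavaCompile.mro():
    if clazz == Task:
        break
    sections.append('%s.%s' % (clazz.__module__, clazz.__name__))
# sections == ['__main__.JavaCompile', '__main__.JvmTask'] when run as a script,
# so pantsrc defaults can target either the concrete task or its base class.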
Example #45
 def resolve(requirements, config, platforms, pythons, ignore_errors=False):
   reqset = OrderedSet()
   requirements = list(requirements)
   for platform in platforms:
     for python in pythons:
       resolver = ReqResolver.resolver(config, platform, python)
       for dist in resolver.resolve(requirements, ignore_errors=ignore_errors):
         reqset.add(dist)
   return list(reqset)
Example #46
    def dump(self):
        self.debug('Building chroot for %s:' % self._targets)
        targets = self.resolve(self._targets)

        for lib in targets['libraries'] | targets['binaries']:
            self._dump_library(lib)

        generated_reqs = OrderedSet()
        if targets['thrifts']:
            for thr in set(targets['thrifts']):
                if thr not in self.MEMOIZED_THRIFTS:
                    self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
                generated_reqs.add(self.MEMOIZED_THRIFTS[thr])

            generated_reqs.add(PythonRequirement('thrift', use_2to3=True))

        for antlr in targets['antlrs']:
            generated_reqs.add(self._generate_antlr_requirement(antlr))

        reqs_from_libraries = OrderedSet()
        for req_lib in targets['reqs']:
            for req in req_lib.payload.requirements:
                reqs_from_libraries.add(req)

        reqs_to_build = OrderedSet()
        find_links = []

        for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
            if not req.should_build(self._interpreter.python,
                                    Platform.current()):
                self.debug('Skipping %s based upon version filter' % req)
                continue
            reqs_to_build.add(req)
            self._dump_requirement(req.requirement)
            if req.repository:
                find_links.append(req.repository)

        distributions = resolve_multi(self._config,
                                      reqs_to_build,
                                      interpreter=self._interpreter,
                                      platforms=self._platforms,
                                      find_links=find_links)

        locations = set()
        for platform, dist_set in distributions.items():
            for dist in dist_set:
                if dist.location not in locations:
                    self._dump_distribution(dist)
                locations.add(dist.location)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        return self._builder
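The `locations` guard matters because a platform-independent distribution can be returned for several platforms at once but should only be dumped into the chroot one time. A runnable sketch with a stand-in Dist type (names hypothetical):

from collections import namedtuple

Dist = namedtuple('Dist', ['location'])  # stand-in for a resolved distribution
distributions = {
    'linux-x86_64': {Dist('ansicolors-1.0.2-py2-none-any.whl')},
    'macosx-10.8-x86_64': {Dist('ansicolors-1.0.2-py2-none-any.whl')},
}
locations = set()
for platform, dist_set in distributions.items():
    for dist in dist_set:
        if dist.location not in locations:
            print('dumping %s' % dist.location)  # happens once despite two platforms
        locations.add(dist.location)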
Example #47
  def bundle(self, app):
    """Create a self-contained application bundle containing the target
    classes, dependencies and resources.
    """
    assert isinstance(app, BundleCreate.App)

    bundledir = os.path.join(self.outdir, '%s-bundle' % app.basename)
    self.context.log.info('creating %s' % os.path.relpath(bundledir, get_buildroot()))

    safe_mkdir(bundledir, clean=True)

    classpath = OrderedSet()
    if not self.deployjar:
      libdir = os.path.join(bundledir, 'libs')
      os.mkdir(libdir)

      # Add external dependencies to the bundle.
      for basedir, externaljar in self.list_jar_dependencies(app.binary):
        path = os.path.join(basedir, externaljar)
        os.symlink(path, os.path.join(libdir, externaljar))
        classpath.add(externaljar)

    # TODO: There should probably be a separate 'binary_jars' product type,
    # so we can more easily distinguish binary jars (that contain all the classes of their
    # transitive deps) and per-target jars.
    for basedir, jars in self.context.products.get('jars').get(app.binary).items():
      if len(jars) != 1:
        raise TaskError('Expected 1 mapped binary for %s but found: %s' % (app.binary, jars))

      binary = jars[0]
      binary_jar = os.path.join(basedir, binary)
      bundle_jar = os.path.join(bundledir, binary)
      # Add the internal classes into the bundle_jar.
      if not classpath:
        os.symlink(binary_jar, bundle_jar)
      else:
        # TODO: Can we copy the existing jar and inject the manifest in, instead of
        # laboriously copying the contents one by one? Would that be more efficient?
        with open_zip(binary_jar, 'r') as src:
          with open_zip(bundle_jar, 'w', compression=ZIP_DEFLATED) as dest:
            for item in src.infolist():
              buf = src.read(item.filename)
              if Manifest.PATH == item.filename:
                manifest = Manifest(buf)
                manifest.addentry(Manifest.CLASS_PATH,
                                  ' '.join(os.path.join('libs', jar) for jar in classpath))
                buf = manifest.contents()
              dest.writestr(item, buf)

    for bundle in app.bundles:
      for path, relpath in bundle.filemap.items():
        bundlepath = os.path.join(bundledir, relpath)
        safe_mkdir(os.path.dirname(bundlepath))
        os.symlink(path, bundlepath)

    return bundledir
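Concretely, the manifest rewrite above produces a Class-Path entry pointing at the symlinked libs/ directory. A small sketch with invented jar names:

import os

classpath = ['guava-18.0.jar', 'protobuf-java-2.5.0.jar']  # hypothetical external jars
class_path_value = ' '.join(os.path.join('libs', jar) for jar in classpath)
# class_path_value == 'libs/guava-18.0.jar libs/protobuf-java-2.5.0.jar'
# i.e. the bundle jar's manifest ends up carrying:
#   Class-Path: libs/guava-18.0.jar libs/protobuf-java-2.5.0.jar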
Example #48
class StepContext(object):
  """Encapsulates external state and the details of creating Nodes.

  This avoids giving Nodes direct access to the task list or subject set.
  """

  def __init__(self, node_builder, project_tree, node_states, inline_nodes):
    self._node_builder = node_builder
    self.project_tree = project_tree
    self._node_states = dict(node_states)
    self._parents = OrderedSet()
    self._inline_nodes = inline_nodes
    self.snapshot_archive_root = os.path.join(project_tree.build_root, '.snapshots')

  def get(self, node):
    """Given a Node and computed node_states, gets the current state for the Node.

    Optionally inlines execution of inlineable dependencies if `inline_nodes=True`.
    """
    state = self._node_states.get(node, None)
    if state is not None:
      return state
    if self._inline_nodes and node.is_inlineable:
      if node in self._parents:
        return Noop.cycle(list(self._parents)[-1], node)
      self._parents.add(node)
      state = self._node_states[node] = node.step(self)
      self._parents.remove(node)
      return state
    else:
      return Waiting([node])

  def gen_nodes(self, subject, product, variants):
    """Yields Node instances which might be able to provide a value for the given inputs."""
    return self._node_builder.gen_nodes(subject, product, variants)

  def select_node(self, selector, subject, variants):
    """Constructs a Node for the given Selector and the given Subject/Variants.

    This method is decoupled from Selector classes in order to allow the `selector` package to not
    need a dependency on the `nodes` package.
    """
    selector_type = type(selector)
    if selector_type is Select:
      return SelectNode(subject, selector.product, variants, None)
    elif selector_type is SelectVariant:
      return SelectNode(subject, selector.product, variants, selector.variant_key)
    elif selector_type is SelectDependencies:
      return DependenciesNode(subject, selector.product, variants, selector.deps_product, selector.field)
    elif selector_type is SelectProjection:
      return ProjectionNode(subject, selector.product, variants, selector.projected_subject, selector.fields, selector.input_product)
    elif selector_type is SelectLiteral:
      # NB: Intentionally ignores subject parameter to provide a literal subject.
      return SelectNode(selector.subject, selector.product, variants, None)
    else:
      raise ValueError('Unrecognized Selector type "{}" for: {}'.format(selector_type, selector))
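The `_parents` bookkeeping in get() is a recursion guard: inlining a node that is already on the in-progress stack would loop forever, so it is reported as a cycle instead. A toy version of the same pattern (hypothetical Node class, standalone, not the pants scheduler):

class Node(object):
    def __init__(self, name):
        self.name = name
        self.deps = []

def evaluate(node, _parents=None):
    parents = _parents if _parents is not None else []
    if node in parents:                       # mirrors `if node in self._parents`
        return 'Noop: cycle at %s' % node.name
    parents.append(node)
    try:
        return (node.name, [evaluate(d, parents) for d in node.deps])
    finally:
        parents.remove(node)                  # mirrors `self._parents.remove(node)`

a, b = Node('a'), Node('b')
a.deps.append(b)
b.deps.append(a)                              # introduce a cycle
print(evaluate(a))   # ('a', [('b', ['Noop: cycle at a'])])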
Example #49
  def dump(self):
    self.debug('Building chroot for {}:'.format(self._targets))
    targets = self.resolve(self._targets)

    for lib in targets['libraries'] | targets['binaries']:
      self._dump_library(lib)

    generated_reqs = OrderedSet()
    if targets['thrifts']:
      for thr in set(targets['thrifts']):
        if thr not in self.MEMOIZED_THRIFTS:
          self.MEMOIZED_THRIFTS[thr] = self._generate_thrift_requirement(thr)
        generated_reqs.add(self.MEMOIZED_THRIFTS[thr])

      generated_reqs.add(PythonRequirement('thrift', use_2to3=True))

    for antlr in targets['antlrs']:
      generated_reqs.add(self._generate_antlr_requirement(antlr))

    reqs_from_libraries = OrderedSet()
    for req_lib in targets['reqs']:
      for req in req_lib.payload.requirements:
        reqs_from_libraries.add(req)

    reqs_to_build = OrderedSet()
    find_links = []

    for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
      if not req.should_build(self._interpreter.python, Platform.current()):
        self.debug('Skipping {} based upon version filter'.format(req))
        continue
      reqs_to_build.add(req)
      self._dump_requirement(req.requirement)
      if req.repository:
        find_links.append(req.repository)

    distributions = resolve_multi(
         self._python_setup,
         self._python_repos,
         reqs_to_build,
         interpreter=self._interpreter,
         platforms=self._platforms,
         ttl=self.context.options.for_global_scope().python_chroot_requirements_ttl,
         find_links=find_links)

    locations = set()
    for platform, dist_set in distributions.items():
      for dist in dist_set:
        if dist.location not in locations:
          self._dump_distribution(dist)
        locations.add(dist.location)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    return self._builder
Example #52
  def scan_buildfiles(root_dir, base_path=None):
    """Looks for all BUILD files under base_path, or under root_dir when no base_path is given."""

    buildfiles = OrderedSet()
    for root, dirs, files in os.walk(base_path if base_path else root_dir):
      for filename in files:
        if BuildFile._is_buildfile_name(filename):
          buildfile_relpath = os.path.relpath(os.path.join(root, filename), root_dir)
          buildfiles.add(BuildFile(root_dir, buildfile_relpath))
    return buildfiles
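BuildFile._is_buildfile_name is not shown in this excerpt. A plausible stand-in, an assumption rather than pants' actual implementation, that makes the walk self-contained:

class BuildFile(object):  # sketch only; the real class carries much more state
    @staticmethod
    def _is_buildfile_name(filename):
        # Assumption: pants BUILD files are conventionally named 'BUILD' or
        # 'BUILD.<suffix>' (e.g. BUILD.tools).
        return filename == 'BUILD' or filename.startswith('BUILD.')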
Example #55
    def strict_dependencies(self, dep_context):
        """
        :param dep_context: A DependencyContext with configuration for the request.
        :return: targets that this target "strictly" depends on. This set of dependencies contains
          only directly declared dependencies, with two exceptions:
            1) aliases are expanded transitively
            2) the strict_dependencies of targets exported by strict_dependencies are included
               (transitively).
        :rtype: list of Target
        """
        strict_deps = self._cached_strict_dependencies_map.get(
            dep_context, None)
        if strict_deps is None:
            default_predicate = self._closure_dep_predicate(
                {self}, **dep_context.target_closure_kwargs)
            # TODO(#5977): this branch needs testing!
            if not default_predicate:

                def default_predicate(*args, **kwargs):
                    return True

            def dep_predicate(source, dependency):
                if not default_predicate(source, dependency):
                    return False

                # Always expand aliases.
                if type(source) in dep_context.alias_types:
                    return True

                # Traverse other dependencies if they are exported.
                if source._dep_is_exported(dependency):
                    return True
                return False

            dep_addresses = [
                d.address for d in self.dependencies
                if default_predicate(self, d)
            ]
            result = self._build_graph.transitive_subgraph_of_addresses_bfs(
                addresses=dep_addresses, dep_predicate=dep_predicate)

            strict_deps = OrderedSet()
            for declared in result:
                if type(declared) in dep_context.alias_types:
                    continue
                if isinstance(declared, dep_context.types_with_closure):
                    strict_deps.update(
                        declared.closure(bfs=True,
                                         **dep_context.target_closure_kwargs))
                strict_deps.add(declared)

            strict_deps = list(strict_deps)
            self._cached_strict_dependencies_map[dep_context] = strict_deps
        return strict_deps
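A worked example of the two exceptions in the docstring, with hypothetical targets:

# Suppose: app depends on alias_lib (a target alias) and on exporting_lib,
# where alias_lib aliases real_lib, and exporting_lib exports exported_lib:
#
#   app -> alias_lib -> real_lib
#   app -> exporting_lib (exports exported_lib)
#
# strict_dependencies(app) then contains real_lib (aliases expand
# transitively) and exported_lib (exports of strict deps are followed),
# but not ordinary transitive deps of real_lib or exported_lib.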
Example #56
 def _parse_addresses(self):
   addresses = OrderedSet()
   for spec in self.args:
     try:
       if self.options.is_directory_list:
         for address in Command.scan_addresses(self.root_dir, spec):
           addresses.add(address)
       else:
         addresses.add(Address.parse(self.root_dir, spec))
     except Exception:
       self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))
   return addresses
Example #57
  def bundle(self, app):
    bundledir = os.path.join(self.outdir, '%s-bundle' % app.basename)
    self.context.log.info('creating %s' % os.path.relpath(bundledir, get_buildroot()))

    safe_mkdir(bundledir, clean=True)

    classpath = OrderedSet()
    if not self.deployjar:
      libdir = os.path.join(bundledir, 'libs')
      os.mkdir(libdir)

      for basedir, externaljar in self.list_jar_dependencies(app.binary):
        src = os.path.join(basedir, externaljar)
        link_name = os.path.join(libdir, externaljar)
        try:
          os.symlink(src, link_name)
        except OSError as e:
          if e.errno == errno.EEXIST:
            raise TaskError('Trying to symlink %s to %s, but it is already symlinked to %s. ' %
                            (link_name, src, os.readlink(link_name)) +
                            'Does the bundled target depend on multiple jvm_binary targets?')
          else:
            raise
        classpath.add(externaljar)

    for basedir, jars in self.context.products.get('jars').get(app.binary).items():
      if len(jars) != 1:
        raise TaskError('Expected 1 mapped binary but found: %s' % jars)

      binary = jars.pop()
      binary_jar = os.path.join(basedir, binary)
      bundle_jar = os.path.join(bundledir, binary)
      if not classpath:
        os.symlink(binary_jar, bundle_jar)
      else:
        with open_zip(binary_jar, 'r') as src:
          with open_zip(bundle_jar, 'w', compression=ZIP_DEFLATED) as dest:
            for item in src.infolist():
              buffer = src.read(item.filename)
              if Manifest.PATH == item.filename:
                manifest = Manifest(buffer)
                manifest.addentry(Manifest.CLASS_PATH,
                                  ' '.join(os.path.join('libs', jar) for jar in classpath))
                buffer = manifest.contents()
              dest.writestr(item, buffer)

    for bundle in app.bundles:
      for path, relpath in bundle.filemap.items():
        bundlepath = os.path.join(bundledir, relpath)
        safe_mkdir(os.path.dirname(bundlepath))
        os.symlink(path, bundlepath)

    return bundledir
Example #58
  def find_all_relevant_resources_targets(self):
    # NB: Ordering isn't relevant here, because it is applied during the dep walk to
    # consume from the runtime_classpath.
    def is_jvm_target(target):
      return isinstance(target, JvmTarget)
    jvm_targets = self.context.targets(predicate=is_jvm_target)

    all_resources_tgts = OrderedSet()
    for target in Target.closure_for_targets(jvm_targets, bfs=True):
      if isinstance(target, Resources):
        all_resources_tgts.add(target)
    return all_resources_tgts
Example #59
 def _parse_buildfiles(self):
   buildfiles = OrderedSet()
   for spec in self.args:
     try:
       if self.options.is_directory_list:
         for buildfile in BuildFile.scan_buildfiles(self.root_dir, spec):
           buildfiles.add(buildfile)
       else:
         buildfiles.add(Address.parse(self.root_dir, spec).buildfile)
     except Exception:
       self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))
   return buildfiles