Example #1
    def execute_codegen(self, target, target_workdir):
        sources_by_base = self._calculate_sources(target)
        sources = target.sources_relative_to_buildroot()

        bases = OrderedSet(sources_by_base.keys())
        bases.update(self._proto_path_imports([target]))

        gen_flag = "--java_out"

        gen = "{0}={1}".format(gen_flag, target_workdir)

        args = [self.protobuf_binary, gen]

        if self.plugins:
            for plugin in self.plugins:
                args.append("--{0}_out={1}".format(plugin, target_workdir))

        for base in bases:
            args.append("--proto_path={0}".format(base))

        args.extend(sources)

        # Tack on extra path entries. These can be used to find protoc plugins
        protoc_environ = os.environ.copy()
        if self._extra_paths:
            protoc_environ["PATH"] = os.pathsep.join(self._extra_paths + protoc_environ["PATH"].split(os.pathsep))

        # Note: The test_source_ordering integration test scrapes this output, so modify it with care.
        self.context.log.debug("Executing: {0}".format("\\\n  ".join(args)))
        with self.context.new_workunit(name="protoc", labels=[WorkUnitLabel.TOOL], cmd=" ".join(args)) as workunit:
            result = subprocess.call(
                args, env=protoc_environ, stdout=workunit.output("stdout"), stderr=workunit.output("stderr")
            )
            if result != 0:
                raise TaskError("{} ... exited non-zero ({})".format(self.protobuf_binary, result))
Example #2
  def get_resolved_jars_for_jar_library(self, jar_library, memo=None):
    """Collects jars for the passed jar_library.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the library's jar_dependencies, but will NOT
    walk into its non-jar dependencies.

    :param jar_library A JarLibrary to collect the transitive artifacts for.
    :param memo see `traverse_dependency_graph`
    :returns: all the artifacts for all of the jars in this library, including transitive deps
    :rtype: list of str
    """
    def to_resolved_jar(jar_module_ref, artifact_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_module_ref.org, name=jar_module_ref.name,
                                                 rev=jar_module_ref.rev,
                                                 classifier=jar_module_ref.classifier),
                         cache_path=artifact_path)

    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in jar_library.jar_dependencies:
      for classifier in jar.artifact_classifiers:
        jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier)
        for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
          for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
            resolved_jars.add(to_resolved_jar(jar_module_ref, artifact_path))
    return resolved_jars
Example #3
  def checkstyle(self, targets, sources):
    runtime_classpaths = self.context.products.get_data('runtime_classpath')
    union_classpath = OrderedSet(self.tool_classpath('checkstyle'))
    for target in targets:
      runtime_classpath = runtime_classpaths.get_for_targets(target.closure(bfs=True))
      union_classpath.update(jar for conf, jar in runtime_classpath
                             if conf in self.get_options().confs)

    args = [
      '-c', self.get_options().configuration,
      '-f', 'plain'
    ]

    if self.get_options().properties:
      properties_file = os.path.join(self.workdir, 'checkstyle.properties')
      with safe_open(properties_file, 'w') as pf:
        for k, v in self.get_options().properties.items():
          pf.write('{key}={value}\n'.format(key=k, value=v))
      args.extend(['-p', properties_file])

    # We've hit known cases of checkstyle command lines being too long for the system so we guard
    # with Xargs since checkstyle does not accept, for example, @argfile style arguments.
    def call(xargs):
      return self.runjava(classpath=union_classpath, main=self._CHECKSTYLE_MAIN,
                          jvm_options=self.get_options().jvm_options,
                          args=args + xargs, workunit_name='checkstyle')
    checks = Xargs(call)

    return checks.execute(sources)
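
The comment above explains the guard: checkstyle command lines can exceed OS argument-length limits, and checkstyle has no @argfile support, so the sources are fed through Xargs in batches. A toy sketch of the batching idea only (not the real pants Xargs API, which splits by command-line length rather than a fixed count):

def run_in_batches(call, items, batch_size=100):
  # Stand-in for the Xargs pattern: invoke `call` on successive slices of
  # `items`, stopping at the first non-zero exit code.
  for i in range(0, len(items), batch_size):
    result = call(items[i:i + batch_size])
    if result != 0:
      return result
  return 0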
Example #4
  def __init__(self, name, dependencies, is_meta):
    Target.__init__(self, name, is_meta)

    self._injected_deps = []
    self.processed_dependencies = resolve(dependencies)

    self.add_label('internal')
    self.dependency_addresses = OrderedSet()
    self.dependencies = OrderedSet()
    self.internal_dependencies = OrderedSet()
    self.jar_dependencies = OrderedSet()

    # TODO(John Sirois): if meta targets were truly built outside parse contexts - we could instead
    # just use the more general check: if parsing: delay(doit) else: doit()
    # Fix how target _ids are built / addresses to not require a BUILD file - ie: support anonymous,
    # non-addressable targets - which is what meta-targets really are once created.
    if is_meta:
      # Meta targets are built outside any parse context - so update dependencies immediately
      self.update_dependencies(self.processed_dependencies)
    else:
      # Defer dependency resolution until after parsing the current BUILD file to allow for
      # forward references
      self._post_construct(self.update_dependencies, self.processed_dependencies)

    self._post_construct(self.inject_dependencies)
Example #5
  def compute_classpath_entries(cls, targets, classpath_products, extra_classpath_tuples, confs):
    """Return the list of classpath entries for a classpath covering the passed targets.

    Filters and adds paths from extra_classpath_tuples to the end of the resulting list.

    :param targets: The targets to generate a classpath for.
    :param ClasspathProducts classpath_products: Product containing classpath elements.
    :param extra_classpath_tuples: Additional classpath entries as tuples of
      (string, ClasspathEntry).
    :param confs: The list of confs for use by this classpath.
    :returns: The classpath entries as a list of path elements.
    :rtype: list of ClasspathEntry
    """
    classpath_iter = cls._classpath_iter(
      classpath_products.get_classpath_entries_for_targets(targets),
      confs=confs,
    )
    total_classpath = OrderedSet(classpath_iter)

    filtered_extra_classpath_iter = cls._filtered_classpath_by_confs_iter(
      extra_classpath_tuples,
      confs,
    )
    extra_classpath_iter = cls._entries_iter(filtered_extra_classpath_iter)
    total_classpath.update(extra_classpath_iter)
    return list(total_classpath)
Example #6
  def get_resolved_jars_for_coordinates(self, coordinates, memo=None):
    """Collects jars for the passed coordinates.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the passed coordinates' dependencies.

    :param collections.Iterable coordinates: Collection of coordinates to collect transitive
                                             resolved jars for.
    :param memo: See `traverse_dependency_graph`.
    :returns: All the artifacts for all of the jars for the provided coordinates,
              including transitive dependencies.
    :rtype: list of :class:`pants.backend.jvm.jar_dependency_utils.ResolvedJar`
    """
    def to_resolved_jar(jar_ref, jar_path):
      return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org,
                                                 name=jar_ref.name,
                                                 rev=jar_ref.rev,
                                                 classifier=jar_ref.classifier,
                                                 ext=jar_ref.ext),
                         cache_path=jar_path)
    resolved_jars = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in coordinates:
      classifier = jar.classifier if self._conf == 'default' else self._conf
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier)
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        for artifact_path in self._artifacts_by_ref[module_ref.unversioned]:
          resolved_jars.add(to_resolved_jar(module_ref, artifact_path))
    return resolved_jars
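
A hypothetical call site for the method above; `ivy_info` (a resolved instance exposing this method) and `jar_lib` (a JarLibrary target) are assumed. The `coordinate` and `cache_path` attributes come from the ResolvedJar construction shown in the snippet:

resolved = ivy_info.get_resolved_jars_for_coordinates(jar_lib.jar_dependencies)
for jar in resolved:
  print('{} -> {}'.format(jar.coordinate, jar.cache_path))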
Example #7
  def execute_codegen(self, target, target_workdir):
    sources_by_base = self._calculate_sources(target)
    sources = target.sources_relative_to_buildroot()

    bases = OrderedSet(sources_by_base.keys())
    bases.update(self._proto_path_imports([target]))

    gen_flag = '--java_out'

    gen = '{0}={1}'.format(gen_flag, target_workdir)

    args = [self.protobuf_binary, gen]

    if self.plugins:
      for plugin in self.plugins:
        args.append("--{0}_out={1}".format(plugin, target_workdir))

    for base in bases:
      args.append('--proto_path={0}'.format(base))

    args.extend(sources)

    # Tack on extra path entries. These can be used to find protoc plugins
    protoc_environ = os.environ.copy()
    if self._extra_paths:
      protoc_environ['PATH'] = os.pathsep.join(self._extra_paths
                                               + protoc_environ['PATH'].split(os.pathsep))

    self.context.log.debug('Executing: {0}'.format('\\\n  '.join(args)))
    process = subprocess.Popen(args, env=protoc_environ)
    result = process.wait()
    if result != 0:
      raise TaskError('{0} ... exited non-zero ({1})'.format(self.protobuf_binary, result))
Example #8
class RootedProducts(object):
  """File products of a build that have a concept of a 'root' directory.

  E.g., classfiles, under a root package directory."""
  def __init__(self, root):
    self._root = root
    self._rel_paths = OrderedSet()

  def add_abs_paths(self, abs_paths):
    for abs_path in abs_paths:
      if not abs_path.startswith(self._root):
        raise Exception('{} is not under {}'.format(abs_path, self._root))
      self._rel_paths.add(os.path.relpath(abs_path, self._root))

  def add_rel_paths(self, rel_paths):
    self._rel_paths.update(rel_paths)

  def root(self):
    return self._root

  def rel_paths(self):
    return self._rel_paths

  def abs_paths(self):
    for relpath in self._rel_paths:
      yield os.path.join(self._root, relpath)

  def __bool__(self):
    return bool(self._rel_paths)

  __nonzero__ = __bool__
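
A minimal usage sketch for RootedProducts; the root and class-file paths are invented for illustration:

products = RootedProducts('/build/out/classes')
products.add_abs_paths([
  '/build/out/classes/com/example/Foo.class',
  '/build/out/classes/com/example/Bar.class',
])
print(list(products.rel_paths()))  # ['com/example/Foo.class', 'com/example/Bar.class']
print(list(products.abs_paths()))  # the same files, re-rooted under the root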
Example #9
 def get_jars_for_ivy_module(self, jar):
   ref = IvyModuleRef(jar.org, jar.name, jar.rev)
   deps = OrderedSet()
   for dep in self.deps_by_caller.get(ref, []):
     deps.add(dep)
     deps.update(self.get_jars_for_ivy_module(dep))
   return deps
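
For deep graphs the recursion above can hit Python's recursion limit, and it revisits shared modules. A sketch of an iterative equivalent over the same deps_by_caller mapping; note the visiting order differs from the recursive version:

def get_jars_for_ivy_module_iterative(self, jar):
  ref = IvyModuleRef(jar.org, jar.name, jar.rev)
  deps = OrderedSet()
  stack = list(self.deps_by_caller.get(ref, []))
  while stack:
    dep = stack.pop()
    if dep not in deps:  # also guards against cycles and duplicate visits
      deps.add(dep)
      stack.extend(self.deps_by_caller.get(dep, []))
  return deps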
Example #10
def dump_requirements(builder, interpreter, reqs, log, platforms=None):
  """Multi-platform dependency resolution for PEX files.

  :param builder: Dump the requirements into this builder.
  :param interpreter: The :class:`PythonInterpreter` to resolve requirements for.
  :param reqs: A list of :class:`PythonRequirement` to resolve.
  :param log: Use this logger.
  :param platforms: A list of :class:`Platform`s to resolve requirements for.
                    Defaults to the platforms specified by PythonSetup.
  """
  deduped_reqs = OrderedSet(reqs)
  find_links = OrderedSet()
  blacklist = PythonSetup.global_instance().resolver_blacklist
  for req in deduped_reqs:
    log.debug('  Dumping requirement: {}'.format(req))
    if not (req.key in blacklist and interpreter.identity.matches(blacklist[req.key])):
      builder.add_requirement(req.requirement)
    if req.repository:
      find_links.add(req.repository)

  # Resolve the requirements into distributions.
  distributions = _resolve_multi(interpreter, deduped_reqs, platforms, find_links)
  locations = set()
  for platform, dists in distributions.items():
    for dist in dists:
      if dist.location not in locations:
        log.debug('  Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
        builder.add_distribution(dist)
      locations.add(dist.location)
Example #11
 def _create_java_target(self, target, dependees):
   genfiles = []
   for source in target.sources_relative_to_source_root():
     path = os.path.join(target.target_base, source)
     genfiles.extend(self.calculate_genfiles(path, source).get('java', []))
   spec_path = os.path.relpath(self.java_out, get_buildroot())
   address = SyntheticAddress(spec_path, target.id)
   deps = OrderedSet(self.javadeps)
   import_jars = target.imported_jars
   jars_tgt = self.context.add_new_target(SyntheticAddress(spec_path, target.id + '-rjars'),
                                          JarLibrary,
                                          jars=import_jars,
                                          derived_from=target)
   # Add in the 'spec-rjars' target, which contains all the JarDependency targets passed in via the
   # imports parameter. Each of these jars is expected to contain .proto files bundled together
   # with their .class files.
   deps.add(jars_tgt)
   tgt = self.context.add_new_target(address,
                                     JavaLibrary,
                                     derived_from=target,
                                     sources=genfiles,
                                     provides=target.provides,
                                     dependencies=deps,
                                     excludes=target.payload.get_field_value('excludes'))
   for dependee in dependees:
     dependee.inject_dependency(tgt.address)
   return tgt
Example #12
  def parse_addresses(self, specs, fail_fast=False):
    """Process a list of command line specs and perform expansion.  This method can expand a list
    of command line specs.
    :param list specs: either a single spec string or a list of spec strings.
    :return: a generator of specs parsed into addresses.
    :raises: CmdLineSpecParser.BadSpecError if any of the address selectors could not be parsed.
    """
    specs = maybe_list(specs)

    addresses = OrderedSet()
    for spec in specs:
      for address in self._parse_spec(spec, fail_fast):
        addresses.add(address)

    results = filter(self._not_excluded_address, addresses)

    # Print debug information about the excluded targets
    if logger.getEffectiveLevel() <= logging.DEBUG and self._exclude_patterns:
      logger.debug('excludes:\n  {excludes}'
                   .format(excludes='\n  '.join(self._exclude_target_regexps)))
      targets = ', '.join(self._excluded_target_map[CmdLineSpecParser._UNMATCHED_KEY])
      logger.debug('Targets after excludes: {targets}'.format(targets=targets))
      excluded_count = 0
      for pattern, targets in six.iteritems(self._excluded_target_map):
        if pattern != CmdLineSpecParser._UNMATCHED_KEY:
          logger.debug('Targets excluded by pattern {pattern}\n  {targets}'
                       .format(pattern=pattern,
                               targets='\n  '.join(targets)))
          excluded_count += len(targets)
      logger.debug('Excluded {count} target{plural}.'
                   .format(count=excluded_count,
                           plural=('s' if excluded_count != 1 else '')))
    return results
Example #13
  def to_jar_dependencies(relative_to, jar_library_specs, build_graph):
    """Convenience method to resolve a list of specs to JarLibraries and return its jars attributes.

    Expects that the jar_libraries are declared relative to this target.

    :API: public

    :param Address relative_to: address target that references jar_library_specs, for
      error messages
    :param list jar_library_specs: string specs to JarLibrary targets. Note, this list should be returned
      by the caller's traversable_specs() implementation to make sure that the jar_dependency jars
      have been added to the build graph.
    :param BuildGraph build_graph: build graph instance used to search for specs
    :return: list of JarDependency instances represented by the library_specs
    """
    jar_deps = OrderedSet()
    for spec in jar_library_specs:
      if not isinstance(spec, string_types):
        raise JarLibrary.ExpectedAddressError(
          "{address}: expected imports to contain string addresses, got {found_class}."
          .format(address=relative_to.spec,
                  found_class=type(spec).__name__))

      lookup = Address.parse(spec, relative_to=relative_to.spec_path)
      target = build_graph.get_target(lookup)
      if not isinstance(target, JarLibrary):
        raise JarLibrary.WrongTargetTypeError(
          "{address}: expected {spec} to be jar_library target type, got {found_class}"
          .format(address=relative_to.spec,
                  spec=spec,
                  found_class=type(target).__name__))
      jar_deps.update(target.jar_dependencies)

    return list(jar_deps)
Example #14
    def transitive_subgraph_of_addresses_bfs(self, addresses, predicate=None, leveled_predicate=None):
        """Returns the transitive dependency closure of `addresses` using BFS.

    :API: public

    :param list<Address> addresses: The closure of `addresses` will be walked.
    :param function predicate: If this parameter is not given, no Targets will be filtered
      out of the closure.  If it is given, any Target which fails the predicate will not be
      walked, nor will its dependencies.  Thus predicate effectively trims out any subgraph
      that would only be reachable through Targets that fail the predicate.
    :param function leveled_predicate: Behaves identically to predicate, but takes the depth of the
      target in the search tree as a second parameter, and it is checked just before a dependency is
      expanded.
    """
        ordered_closure = OrderedSet()
        # Use the DepthAgnosticWalk if we can, because DepthAwareWalk does a bit of extra work
        # that can slow things down by a few millis.
        walker = self.DepthAwareWalk if leveled_predicate else self.DepthAgnosticWalk
        walk = walker()
        to_walk = deque((0, addr) for addr in addresses)
        while len(to_walk) > 0:
            level, address = to_walk.popleft()
            target = self._target_by_address[address]
            if not walk.expand_once(target, level):
                continue
            if predicate and not predicate(target):
                continue
            if walk.do_work_once(target):
                ordered_closure.add(target)
            for addr in self._target_dependencies_by_address[address]:
                if not leveled_predicate or leveled_predicate(self._target_by_address[addr], level):
                    to_walk.append((level + 1, addr))
        return ordered_closure
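
A hypothetical call, assuming `graph` is the BuildGraph owning this method and `roots` is a list of Address objects already injected into it; the lambda stops expansion once the current depth reaches 1, yielding the roots plus their direct dependencies:

shallow_closure = graph.transitive_subgraph_of_addresses_bfs(
  roots,
  leveled_predicate=lambda target, depth: depth < 1,
)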
Example #15
  def execute(self):
    targets = self.context.targets()
    for conf in self.confs:
      outpath = os.path.join(self.workdir, '%s.%s.provides' %
                             (self.ivy_utils.identify(targets)[1], conf))
      if self.transitive:
        outpath += '.transitive'
      ivyinfo = self.ivy_utils.parse_xml_report(self.context.target_roots, conf)
      jar_paths = OrderedSet()
      for root in self.target_roots:
        jar_paths.update(self.get_jar_paths(ivyinfo, root, conf))

      with open(outpath, 'w') as outfile:
        def do_write(s):
          outfile.write(s)
          if self.also_write_to_stdout:
            sys.stdout.write(s)
        for jar in jar_paths:
          do_write('# from jar %s\n' % jar)
          for line in self.list_jar(jar):
            if line.endswith('.class'):
              class_name = line[:-6].replace('/', '.')
              do_write(class_name)
              do_write('\n')
      print('Wrote provides information to %s' % outpath)
Example #16
  def targets(self, predicate=None, postorder=False):
    """Selects targets in-play in this run from the target roots and their transitive dependencies.

    Also includes any new synthetic targets created from the target roots or their transitive
    dependencies during the course of the run.

    :API: public

    :param predicate: If specified, the predicate will be used to narrow the scope of targets
                      returned.
    :param bool postorder: `True` to gather transitive dependencies with a postorder traversal;
                           `False` (the default) to use a preorder traversal.
    :returns: A list of matching targets.
    """
    target_set = self._collect_targets(self.target_roots, postorder=postorder)

    synthetics = OrderedSet()
    for synthetic_address in self.build_graph.synthetic_addresses:
      if self.build_graph.get_concrete_derived_from(synthetic_address) in target_set:
        synthetics.add(self.build_graph.get_target(synthetic_address))

    synthetic_set = self._collect_targets(synthetics, postorder=postorder)

    target_set.update(synthetic_set)

    return filter(predicate, target_set)
Example #17
  def ancestors(self):
    """Returns all BUILD files in ancestor directories of this BUILD file's parent directory."""

    def find_parent(directory):
      parent = os.path.dirname(directory)
      for parent_buildfile in BuildFile._get_all_build_files(parent):
        buildfile = os.path.join(parent, parent_buildfile)
        if os.path.exists(buildfile) and not os.path.isdir(buildfile):
          return parent, BuildFile.from_cache(self.root_dir,
                                              os.path.relpath(buildfile, self.root_dir))
      return parent, None

    parent_buildfiles = OrderedSet()

    def is_root(path):
      return os.path.abspath(self.root_dir) == os.path.abspath(path)

    parentdir = os.path.dirname(self.full_path)
    visited = set()
    while parentdir not in visited and not is_root(parentdir):
      visited.add(parentdir)
      parentdir, buildfile = find_parent(parentdir)
      if buildfile:
        parent_buildfiles.update(buildfile.family())

    return parent_buildfiles
Example #18
  def _resolve_java_deps(self, target):
    key = self._CONFIG_SECTION_BY_COMPILER[target.compiler]

    deps = OrderedSet()
    for dep in self.context.config.getlist(key, 'javadeps'):
      deps.update(self.context.resolve(dep))
    return deps
Example #19
 def get_transitive_jars(jar_lib):
   if not ivy_info:
     return OrderedSet()
   transitive_jars = OrderedSet()
   for jar in jar_lib.jar_dependencies:
     transitive_jars.update(ivy_info.get_jars_for_ivy_module(jar))
   return transitive_jars
Example #20
  def _resolve_overrides(self):
    """
    Resolves override targets, and then excludes and re-includes each of them
    to create and return a new dependency set.
    """
    if not self.override_targets:
      return self._pre_override_dependencies

    result = OrderedSet()

    # resolve overrides and fetch all of their "artifact-providing" dependencies
    excludes = set()
    for override_target in self.override_targets:
      # add pre_override deps of the target as exclusions
      for resolved in override_target.resolve():
        excludes.update(self._excludes(resolved))
      # prepend the target as a new target
      result.add(override_target)

    # add excludes for each artifact
    for direct_dep in self._pre_override_dependencies:
      # add relevant excludes to jar dependencies
      for jar_dep in self._jar_dependencies(direct_dep):
        for exclude in excludes:
          jar_dep.exclude(exclude.org, exclude.name)
      result.add(direct_dep)

    return result
Example #21
 def minimum_path(cls):
   """
     Return as a tuple the emulated sys.path and sys.path_importer_cache of
     a bare python installation, a la python -S.
   """
   from site import USER_SITE
   from twitter.common.collections import OrderedSet
   from pkg_resources import find_distributions
   from distutils.sysconfig import get_python_lib
   site_libs = set([get_python_lib(plat_specific=False), get_python_lib(plat_specific=True)])
   site_distributions = OrderedSet()
   for path_element in sys.path:
     if any(path_element.startswith(site_lib) for site_lib in site_libs):
       TRACER.log('Inspecting path element: %s' % path_element)
       site_distributions.update(dist.location for dist in find_distributions(path_element))
   user_site_distributions = OrderedSet(dist.location for dist in find_distributions(USER_SITE))
   for path in site_distributions:
     TRACER.log('Scrubbing from site-packages: %s' % path)
   for path in user_site_distributions:
     TRACER.log('Scrubbing from user site: %s' % path)
   scrub_paths = site_distributions | user_site_distributions
   scrubbed_sys_path = list(OrderedSet(sys.path) - scrub_paths)
   scrub_from_importer_cache = filter(
     lambda key: any(key.startswith(path) for path in scrub_paths),
     sys.path_importer_cache.keys())
   scrubbed_importer_cache = dict((key, value) for (key, value) in sys.path_importer_cache.items()
     if key not in scrub_from_importer_cache)
   return scrubbed_sys_path, scrubbed_importer_cache
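
A hypothetical caller applying the scrubbed state to the live interpreter; `PythonEnvironment` stands in for whatever class owns minimum_path:

import sys

scrubbed_path, scrubbed_cache = PythonEnvironment.minimum_path()
sys.path[:] = scrubbed_path
sys.path_importer_cache.clear()
sys.path_importer_cache.update(scrubbed_cache)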
Example #22
  def get_artifacts_for_jar_library(self, jar_library, memo=None):
    """Collects IvyArtifact instances for the passed jar_library.

    Because artifacts are only fetched for the "winning" version of a module, the artifacts
    will not always represent the version originally declared by the library.

    This method is transitive within the library's jar_dependencies, but will NOT
    walk into its non-jar dependencies.

    :param jar_library: A JarLibrary to collect the transitive artifacts for.
    :param memo: See `traverse_dependency_graph`.
    """
    artifacts = OrderedSet()
    def create_collection(dep):
      return OrderedSet([dep])
    for jar in jar_library.jar_dependencies:
      jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev)
      valid_classifiers = jar.artifact_classifiers
      artifacts_for_jar = []
      for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo):
        artifacts_for_jar.extend(
          artifact for artifact in self._artifacts_by_ref[module_ref.unversioned]
          if artifact.classifier in valid_classifiers
        )

      artifacts.update(artifacts_for_jar)
    return artifacts
Example #23
def extract_target(java_targets, is_classpath):
  primary_target = InternalTarget.sort_targets(java_targets)[0]

  with ParseContext.temp(primary_target.target_base):
    internal_deps, jar_deps = _extract_target(java_targets, is_classpath)

    # TODO(John Sirois): make an empty source set work in ant/compile.xml
    sources = [ '__no_source__' ]

    all_deps = OrderedSet()
    all_deps.update(internal_deps)
    all_deps.update(jar_deps)

    if is_java(primary_target):
      return JavaLibrary('ide',
                         sources,
                         dependencies = all_deps,
                         is_meta = True)
    elif is_scala(primary_target):
      return ScalaLibrary('ide',
                          sources,
                          dependencies = all_deps,
                          is_meta = True)
    else:
      raise TypeError("Cannot generate IDE configuration for targets: %s" % java_targets)
Example #24
 def dependents_of_addresses(self, addresses):
   """Given an iterable of addresses, yield all of those addresses dependents."""
   seen = OrderedSet(addresses)
   for address in addresses:
     seen.update(self._dependent_address_map[address])
     seen.update(self._implicit_dependent_address_map[address])
   return seen
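
The method above collects only direct dependents. A hypothetical transitive variant under the same `_dependent_address_map` / `_implicit_dependent_address_map` assumptions, expanding to a fixed point (imports shown for completeness):

from collections import deque
from itertools import chain

def transitive_dependents_of_addresses(self, addresses):
  seen = OrderedSet(addresses)
  frontier = deque(addresses)
  while frontier:
    address = frontier.popleft()
    for dependent in chain(self._dependent_address_map[address],
                           self._implicit_dependent_address_map[address]):
      if dependent not in seen:
        seen.add(dependent)
        frontier.append(dependent)
  return seen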
Example #25
 def create_geninfo(key):
   gen_info = context.config.getdict('thrift-gen', key)
   gen = gen_info['gen']
   deps = OrderedSet()
   for dep in gen_info['deps']:
     deps.update(context.resolve(dep))
   return ThriftGen.GenInfo(gen, deps)
Example #26
  def bundle(self, app):
    """Create a self-contained application bundle.

    The bundle will contain the target classes, dependencies and resources.
    """
    assert(isinstance(app, BundleCreate.App))

    def verbose_symlink(src, dst):
      try:
        os.symlink(src, dst)
      except OSError as e:
        self.context.log.error("Unable to create symlink: {0} -> {1} ({2})".format(src, dst, e))
        raise

    bundle_dir = os.path.join(self._outdir, '%s-bundle' % app.basename)
    self.context.log.info('creating %s' % os.path.relpath(bundle_dir, get_buildroot()))

    safe_mkdir(bundle_dir, clean=True)

    classpath = OrderedSet()
    # If creating a deployjar, we add the external dependencies to the bundle as
    # loose classes, and have no classpath. Otherwise we add the external dependencies
    # to the bundle as jars in a libs directory.
    if not self._create_deployjar:
      lib_dir = os.path.join(bundle_dir, 'libs')
      os.mkdir(lib_dir)

      jarmap = self.context.products.get('jars')

      def add_jars(target):
        generated = jarmap.get(target)
        if generated:
          for base_dir, internal_jars in generated.items():
            for internal_jar in internal_jars:
              verbose_symlink(os.path.join(base_dir, internal_jar), os.path.join(lib_dir, internal_jar))
              classpath.add(internal_jar)

      app.binary.walk(add_jars, lambda t: t != app.binary)

      # Add external dependencies to the bundle.
      for basedir, external_jar in self.list_external_jar_dependencies(app.binary):
        path = os.path.join(basedir, external_jar)
        verbose_symlink(path, os.path.join(lib_dir, external_jar))
        classpath.add(external_jar)

    bundle_jar = os.path.join(bundle_dir, '%s.jar' % app.binary.basename)

    with self.monolithic_jar(app.binary, bundle_jar,
                             with_external_deps=self._create_deployjar) as jar:
      self.add_main_manifest_entry(jar, app.binary)
      if classpath:
        jar.classpath([os.path.join('libs', jar) for jar in classpath])

    for bundle in app.bundles:
      for path, relpath in bundle.filemap.items():
        bundle_path = os.path.join(bundle_dir, relpath)
        safe_mkdir(os.path.dirname(bundle_path))
        verbose_symlink(path, bundle_path)

    return bundle_dir
Example #27
  def _detect_cycle(self, src, dest):
    """Given a src and a dest, each of which _might_ already exist in the graph, detect cycles.

    Return a path of Nodes that describe the cycle, or None.
    """
    path = OrderedSet()
    walked = set()
    def _walk(node):
      if node in path:
        return tuple(path) + (node,)
      if node in walked:
        return None
      path.add(node)
      walked.add(node)

      for dep in self.dependencies_of(node):
        found = _walk(dep)
        if found is not None:
          return found
      path.discard(node)
      return None

    # Initialize the path with src (since the edge from src->dest may not actually exist), and
    # then walk from the dest.
    path.update([src])
    return _walk(dest)
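
The same algorithm as a standalone sketch over a plain dict-of-lists graph, which makes the path/walked bookkeeping easy to exercise; OrderedSet here is any insertion-ordered set with discard():

def detect_cycle(edges, src, dest):
  path = OrderedSet()
  walked = set()

  def _walk(node):
    if node in path:  # re-entered a node still on the current path: cycle
      return tuple(path) + (node,)
    if node in walked:  # already fully explored with no cycle
      return None
    path.add(node)
    walked.add(node)
    for dep in edges.get(node, ()):
      found = _walk(dep)
      if found is not None:
        return found
    path.discard(node)
    return None

  path.update([src])
  return _walk(dest)

print(detect_cycle({'a': ['b'], 'b': ['a']}, 'a', 'b'))  # ('a', 'b', 'a')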
Example #28
  def _create_doc_target(self):
    all_sources = []
    all_deps = OrderedSet()
    for target in self.targets:
      if not self.only_provides or is_exported(target):
        for source in target.sources:
          source_path = os.path.join(self.java_src_prefix, source)
          if os.path.exists(source_path):
            all_sources.append(source_path)
          else:
            print "skipping %s" % source_path

        for jar_dep in target.jar_dependencies:
          if jar_dep.rev:
            all_deps.add(copy(jar_dep).intransitive())

    def create_meta_target():
      return JavaLibrary('pants.doc.deps',
                         all_sources,
                         provides = None,
                         dependencies = all_deps,
                         excludes = None,
                         resources = None,
                         binary_resources = None,
                         deployjar = False,
                         buildflags = None,
                         is_meta = True)

    # TODO(John Sirois): Find a better way to do_in_context when we don't care about the context
    return list(self.targets)[0].do_in_context(create_meta_target)
Example #29
 def _flatten_type_constraints(self, selection_products):
   type_constraints = filter(lambda o: isinstance(o, Exactly), selection_products)
   non_type_constraints = filter(lambda o: not isinstance(o, Exactly), selection_products)
   flattened_products = OrderedSet(non_type_constraints)
   for t in type_constraints:
     flattened_products.update(t.types)
   return flattened_products
Example #30
  def _compute_sources(self, target):
    relative_sources = OrderedSet()
    source_roots = OrderedSet()

    def capture_and_relativize_to_source_root(source):
      source_root = self.context.source_roots.find_by_path(source)
      if not source_root:
        source_root = self.context.source_roots.find(target)
      source_roots.add(source_root.path)
      return fast_relpath(source, source_root.path)

    if target.payload.get_field_value('ordered_sources'):
      # Re-match the filespecs against the sources in order to apply them in the literal order
      # they were specified in.
      filespecs = target.globs_relative_to_buildroot()
      excludes = filespecs.get('excludes', [])
      for filespec in filespecs.get('globs', []):
        sources = [s for s in target.sources_relative_to_buildroot()
                   if globs_matches([s], [filespec], excludes)]
        if len(sources) != 1:
          raise TargetDefinitionException(
              target,
              'With `ordered_sources=True`, expected one match for each file literal, '
              'but got: {} for literal `{}`.'.format(sources, filespec)
            )
        relative_sources.add(capture_and_relativize_to_source_root(sources[0]))
    else:
      # Otherwise, use the default (unspecified) snapshot ordering.
      for source in target.sources_relative_to_buildroot():
        relative_sources.add(capture_and_relativize_to_source_root(source))
    return relative_sources, source_roots
Example #31
    def wrapper(func):
        if not inspect.isfunction(func):
            raise ValueError(
                'The @rule decorator must be applied innermost of all decorators.'
            )

        owning_module = sys.modules[func.__module__]
        source = inspect.getsource(func)
        beginning_indent = _get_starting_indent(source)
        if beginning_indent:
            source = "\n".join(line[beginning_indent:]
                               for line in source.split("\n"))
        module_ast = ast.parse(source)

        def resolve_type(name):
            resolved = (getattr(owning_module, name, None)
                        or owning_module.__builtins__.get(name, None))
            if resolved is None:
                raise ValueError(
                    f'Could not resolve type `{name}` in top level of module {owning_module.__name__}'
                )
            elif not isinstance(resolved, type):
                raise ValueError(
                    f'Expected a `type` constructor for `{name}`, but got: {resolved} (type `{type(resolved).__name__}`)'
                )
            return resolved

        gets = OrderedSet()
        rule_func_node = assert_single_element(
            node for node in ast.iter_child_nodes(module_ast) if
            isinstance(node, ast.FunctionDef) and node.name == func.__name__)

        parents_table = {}
        for parent in ast.walk(rule_func_node):
            for child in ast.iter_child_nodes(parent):
                parents_table[child] = parent

        rule_visitor = _RuleVisitor(
            func=func,
            func_node=rule_func_node,
            func_source=source,
            orig_indent=beginning_indent,
            parents_table=parents_table,
        )
        rule_visitor.visit(rule_func_node)
        gets.update(
            Get.create_statically_for_rule_graph(resolve_type(p),
                                                 resolve_type(s))
            for p, s in rule_visitor.gets)

        # Register dependencies for @console_rule/Goal.
        if is_goal_cls:
            dependency_rules = (optionable_rule(output_type.Options), )
        else:
            dependency_rules = None

        func.rule = TaskRule(
            output_type,
            tuple(input_selectors),
            func,
            input_gets=tuple(gets),
            dependency_rules=dependency_rules,
            cacheable=cacheable,
        )

        return func
Example #32
 def configurations(self):
     confs = OrderedSet(self._configurations)
     confs.update(artifact.conf for artifact in self.artifacts
                  if artifact.conf)
     return list(confs)
Example #33
    def bundle(self, app):
        """Create a self-contained application bundle containing the target
    classes, dependencies and resources.
    """
        assert (isinstance(app, BundleCreate.App))

        bundledir = os.path.join(self.outdir, '%s-bundle' % app.basename)
        self.context.log.info('creating %s' %
                              os.path.relpath(bundledir, get_buildroot()))

        safe_mkdir(bundledir, clean=True)

        classpath = OrderedSet()
        if not self.deployjar:
            libdir = os.path.join(bundledir, 'libs')
            os.mkdir(libdir)

            # Add internal dependencies to the bundle.
            def add_jars(target):
                target_jars = self.context.products.get('jars').get(target)
                if target_jars is not None:
                    for basedir, jars in target_jars.items():
                        for internaljar in jars:
                            os.symlink(os.path.join(basedir, internaljar),
                                       os.path.join(libdir, internaljar))
                            classpath.add(internaljar)

            app.binary.walk(add_jars, lambda t: t.is_internal)

            # Add external dependencies to the bundle.
            for basedir, externaljar in self.list_jar_dependencies(app.binary):
                path = os.path.join(basedir, externaljar)
                os.symlink(path, os.path.join(libdir, externaljar))
                classpath.add(externaljar)

        for basedir, jars in self.context.products.get('jars').get(
                app.binary).items():
            if len(jars) != 1:
                raise TaskError(
                    'Expected 1 mapped binary for %s but found: %s' %
                    (app.binary, jars))

            binary = jars[0]
            binary_jar = os.path.join(basedir, binary)
            bundle_jar = os.path.join(bundledir, binary)
            if not classpath:
                os.symlink(binary_jar, bundle_jar)
            else:
                with open_zip(binary_jar, 'r') as src:
                    with open_zip(bundle_jar, 'w',
                                  compression=ZIP_DEFLATED) as dest:
                        for item in src.infolist():
                            buf = src.read(item.filename)
                            if Manifest.PATH == item.filename:
                                manifest = Manifest(buf)
                                manifest.addentry(
                                    Manifest.CLASS_PATH, ' '.join(
                                        os.path.join('libs', jar)
                                        for jar in classpath))
                                buf = manifest.contents()
                            dest.writestr(item, buf)

        for bundle in app.bundles:
            for path, relpath in bundle.filemap.items():
                bundlepath = os.path.join(bundledir, relpath)
                safe_mkdir(os.path.dirname(bundlepath))
                os.symlink(path, bundlepath)

        return bundledir
Example #34
 def add_task(product_type, rule):
     # TODO(#7311): make a defaultdict-like wrapper for OrderedDict if more widely used.
     if product_type not in serializable_rules:
         serializable_rules[product_type] = OrderedSet()
     serializable_rules[product_type].add(rule)
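
The TODO above alludes to a defaultdict-like wrapper; a sketch of that alternative (a plain defaultdict does not preserve key insertion order on older Pythons, which is why the original keeps the explicit membership check over an OrderedDict):

from collections import defaultdict

serializable_rules = defaultdict(OrderedSet)

def add_task(product_type, rule):
  serializable_rules[product_type].add(rule)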
Example #35
 def sections(self):
     ret = OrderedSet()
     for cfg in self.configs:
         ret.update(cfg.sections())
     return ret
Example #36
    def test_source_root_tree(self):
        tree = SourceRootTree()
        self.assertEquals((None, None), tree.get_root_and_types(""))
        self.assertEquals((None, None),
                          tree.get_root_and_types("tests/language"))
        self.assertEquals((None, None),
                          tree.get_root_and_types("tests/language/foo"))
        self.assertEquals((None, None),
                          tree.get_root_and_types("src/language"))
        self.assertEquals((None, None), tree.get_root_and_types("src"))

        tree.add_root("tests/language", set([NotTestTarget, TestTarget]))
        self.assertEquals(
            ("tests/language", OrderedSet([NotTestTarget, TestTarget])),
            tree.get_root_and_types("tests/language"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        root, types = tree.get_root_and_types("tests/language/foo")
        self.assertEquals(
            "tests/language",
            root,
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            set(types),
            set([NotTestTarget, TestTarget]),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            (None, None),
            tree.get_root_and_types("src"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            (None, None),
            tree.get_root_and_types("src/bar"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            (None, None),
            tree.get_root_and_types("s"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))

        tree.add_root("src/language", set([NotTestTarget]))
        self.assertEquals(
            ("tests/language", OrderedSet([NotTestTarget, TestTarget])),
            tree.get_root_and_types("tests/language"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            ("tests/language", OrderedSet([NotTestTarget, TestTarget])),
            tree.get_root_and_types("tests/language/foo"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            ("src/language", OrderedSet([NotTestTarget])),
            tree.get_root_and_types("src/language"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            ("src/language", OrderedSet([NotTestTarget])),
            tree.get_root_and_types("src/language/bar"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        self.assertEquals(
            (None, None),
            tree.get_root_and_types("src"),
            msg="Failed for tree: {dump}".format(dump=tree._dump()))
        with self.assertRaises(SourceRootTree.DuplicateSourceRootError):
            tree.add_root("tests/language", set([NotTestTarget]))
        with self.assertRaises(SourceRootTree.NestedSourceRootError):
            tree.add_root("tests", set([NotTestTarget]))
Example #37
 def __init__(self):
   self.internal_dependencies = OrderedSet()
Example #38
def _extract_target(meta_target, is_transitive, is_apt):
  """
    Extracts the minimal set of internal dependencies and external jar dependencies from the given
    (meta) target so that an ide can run any required custom annotation processors and resolve all
    symbols.

    The extraction algorithm proceeds under the following assumptions:
    1.) A custom annotation processor (or even a codegen target) may have internal dependencies
    2.) An IDE need not have any compiled classes for ide compilable sources on its classpath in
        order to resolve symbols, it just needs any custom annotation processors, custom codegen'ed
        classes and any external jars dependencies

    The algorithm then proceeds to categorize each target as either ide classpath required target
    or not.  If the target is required on the ide classpath, it is retained and grafted into the
    graph of internal dependencies returned.  If not, the target's jar dependencies are added to the
    set of all external jar dependencies required on the ide classpath.  Finally the tuple of all
    collected (internal dependencies, jar dependencies) is returned.

    The assumptions noted above imply that any internal target depended on by an ide classpath
    required target must also be grafted into the graph of internal dependencies returned.
  """

  class RootNode(object):
    def __init__(self):
      self.internal_dependencies = OrderedSet()

  root_target = RootNode()

  codegen_graph = deque([])
  codegen_graph.appendleft(root_target)
  jar_deps = OrderedSet()

  visited = set()
  def sift_targets(target, add_deps = False):
    if target not in visited:
      visited.add(target)
      is_needed_on_ide_classpath = add_deps or target.is_codegen or is_apt(target)

      if is_needed_on_ide_classpath:
        codegen_graph[0].internal_dependencies.add(target)
      else:
        for jar_dependency in target.jar_dependencies:
          if jar_dependency.rev:
            if is_transitive(target):
              jar_deps.add(jar_dependency)
            else:
              jar_deps.add(copy(jar_dependency).intransitive())

      if is_needed_on_ide_classpath:
        codegen_graph.appendleft(target)

      for internal_target in list(target.internal_dependencies):
        target.internal_dependencies.discard(internal_target)
        sift_targets(internal_target, is_needed_on_ide_classpath)

      if is_needed_on_ide_classpath:
        codegen_graph.popleft()

  sift_targets(meta_target)

  assert len(codegen_graph) == 1 and codegen_graph[0] == root_target,\
    "Unexpected walk: %s" % codegen_graph

  return codegen_graph.popleft().internal_dependencies, jar_deps
Example #39
class BuildConfiguration(object):
    """Stores the types and helper functions exposed to BUILD files."""
    class ParseState(
            namedtuple('ParseState', ['parse_context', 'parse_globals'])):
        @property
        def objects(self):
            return self.parse_context._storage.objects

    def __init__(self):
        self._target_by_alias = {}
        self._target_macro_factory_by_alias = {}
        self._exposed_object_by_alias = {}
        self._exposed_context_aware_object_factory_by_alias = {}
        self._optionables = OrderedSet()
        self._rules = OrderedSet()

    def registered_aliases(self):
        """Return the registered aliases exposed in BUILD files.

    These returned aliases aren't so useful for actually parsing BUILD files.
    They are useful for generating things like http://pantsbuild.github.io/build_dictionary.html.

    :returns: A new BuildFileAliases instance containing this BuildConfiguration's registered alias
              mappings.
    :rtype: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    """
        target_factories_by_alias = self._target_by_alias.copy()
        target_factories_by_alias.update(self._target_macro_factory_by_alias)
        return BuildFileAliases(
            targets=target_factories_by_alias,
            objects=self._exposed_object_by_alias.copy(),
            context_aware_object_factories=(
                self._exposed_context_aware_object_factory_by_alias.copy()))

    def register_aliases(self, aliases):
        """Registers the given aliases to be exposed in parsed BUILD files.

    :param aliases: The BuildFileAliases to register.
    :type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    """
        if not isinstance(aliases, BuildFileAliases):
            raise TypeError(
                'The aliases must be a BuildFileAliases, given {}'.format(
                    aliases))

        for alias, target_type in aliases.target_types.items():
            self._register_target_alias(alias, target_type)

        for alias, target_macro_factory in aliases.target_macro_factories.items():
            self._register_target_macro_factory_alias(alias,
                                                      target_macro_factory)

        for alias, obj in aliases.objects.items():
            self._register_exposed_object(alias, obj)

        for alias, context_aware_object_factory in aliases.context_aware_object_factories.items():
            self._register_exposed_context_aware_object_factory(
                alias, context_aware_object_factory)

    # TODO(John Sirois): Warn on alias override across all aliases since they share a global
    # namespace in BUILD files.
    # See: https://github.com/pantsbuild/pants/issues/2151
    def _register_target_alias(self, alias, target_type):
        if alias in self._target_by_alias:
            logger.debug('Target alias {} has already been registered. '
                         'Overwriting!'.format(alias))

        self._target_by_alias[alias] = target_type
        self.register_optionables(target_type.subsystems())

    def _register_target_macro_factory_alias(self, alias,
                                             target_macro_factory):
        if alias in self._target_macro_factory_by_alias:
            logger.debug('TargetMacro alias {} has already been registered. '
                         'Overwriting!'.format(alias))

        self._target_macro_factory_by_alias[alias] = target_macro_factory
        for target_type in target_macro_factory.target_types:
            self.register_optionables(target_type.subsystems())

    def _register_exposed_object(self, alias, obj):
        if alias in self._exposed_object_by_alias:
            logger.debug('Object alias {} has already been registered. '
                         'Overwriting!'.format(alias))

        self._exposed_object_by_alias[alias] = obj
        # obj doesn't implement any common base class, so we have to test for this attr.
        if hasattr(obj, 'subsystems'):
            self.register_optionables(obj.subsystems())

    def _register_exposed_context_aware_object_factory(
            self, alias, context_aware_object_factory):
        if alias in self._exposed_context_aware_object_factory_by_alias:
            logger.debug(
                'This context aware object factory alias {} has already been registered. '
                'Overwriting!'.format(alias))

        self._exposed_context_aware_object_factory_by_alias[alias] = (
            context_aware_object_factory)

    @deprecated('1.15.0.dev1', hint_message='Use self.register_optionables().')
    def register_subsystems(self, subsystems):
        return self.register_optionables(subsystems)

    def register_optionables(self, optionables):
        """Registers the given subsystem types.

    :param optionables: The Optionable types to register.
    :type optionables: :class:`collections.Iterable` containing
                       :class:`pants.option.optionable.Optionable` subclasses.
    """
        if not isinstance(optionables, Iterable):
            raise TypeError(
                'The optionables must be an iterable, given {}'.format(
                    optionables))
        optionables = tuple(optionables)
        if not optionables:
            return

        invalid_optionables = [
            s for s in optionables
            if not isinstance(s, type) or not issubclass(s, Optionable)
        ]
        if invalid_optionables:
            raise TypeError(
                'The following items from the given optionables are not Optionable '
                'subclasses:\n\t{}'.format('\n\t'.join(
                    str(i) for i in invalid_optionables)))

        self._optionables.update(optionables)

    def optionables(self):
        """Returns the registered Optionable types.

    :rtype: set
    """
        return self._optionables

    @deprecated('1.15.0.dev1', hint_message='Use self.optionables().')
    def subsystems(self):
        """Returns the registered Subsystem types.

    :rtype: set
    """
        return {o for o in self._optionables if issubclass(o, Subsystem)}

    def register_rules(self, rules):
        """Registers the given rules.

    :param rules: The rules to register.
    :type rules: :class:`collections.Iterable` containing
                 :class:`pants.engine.rules.Rule` instances.
    """
        if not isinstance(rules, Iterable):
            raise TypeError(
                'The rules must be an iterable, given {!r}'.format(rules))

        # "Index" the rules to normalize them and expand their dependencies.
        indexed_rules = RuleIndex.create(rules).normalized_rules()

        # Store the rules and record their dependency Optionables.
        self._rules.update(indexed_rules)
        dependency_optionables = {
            do
            for rule in indexed_rules for do in rule.dependency_optionables
            if rule.dependency_optionables
        }
        self.register_optionables(dependency_optionables)

    def rules(self):
        """Returns the registered rules.

    :rtype: list
    """
        return list(self._rules)

    @memoized_method
    def _get_addressable_factory(self, target_type, alias):
        return TargetAddressable.factory(target_type=target_type, alias=alias)

    def initialize_parse_state(self, build_file):
        """Creates a fresh parse state for the given build file.

    :param build_file: The BUILD file to set up a new ParseState for.
    :type build_file: :class:`pants.base.build_file.BuildFile`
    :returns: A fresh ParseState for parsing the given `build_file` with.
    :rtype: :class:`BuildConfiguration.ParseState`
    """
        # TODO(John Sirois): Introduce a factory method to seal the BuildConfiguration and add a check
        # there that all anonymous types are covered by context aware object factories that are
        # Macro instances.  Without this, we could have non-Macro context aware object factories being
        # asked to be a BuildFileTargetFactory when they are not (in SourceRoot registration context).
        # See: https://github.com/pantsbuild/pants/issues/2125
        type_aliases = self._exposed_object_by_alias.copy()
        parse_context = ParseContext(rel_path=build_file.spec_path,
                                     type_aliases=type_aliases)

        def create_call_proxy(tgt_type, tgt_alias=None):
            def registration_callback(address, addressable):
                parse_context._storage.add(addressable,
                                           name=address.target_name)

            addressable_factory = self._get_addressable_factory(
                tgt_type, tgt_alias)
            return AddressableCallProxy(
                addressable_factory=addressable_factory,
                build_file=build_file,
                registration_callback=registration_callback)

        # Expose all aliased Target types.
        for alias, target_type in self._target_by_alias.items():
            proxy = create_call_proxy(target_type, alias)
            type_aliases[alias] = proxy

        # Expose aliases for exposed objects and targets in the BUILD file.
        parse_globals = type_aliases.copy()

        # Now it's safe to add mappings from both the directly exposed and macro-created target
        # types to their call proxies for context awares and macros to use to manufacture targets
        # by type instead of by alias.
        for alias, target_type in self._target_by_alias.items():
            proxy = type_aliases[alias]
            type_aliases[target_type] = proxy

        for target_macro_factory in self._target_macro_factory_by_alias.values():
            for target_type in target_macro_factory.target_types:
                proxy = create_call_proxy(target_type)
                type_aliases[target_type] = proxy

        for alias, object_factory in self._exposed_context_aware_object_factory_by_alias.items():
            parse_globals[alias] = object_factory(parse_context)

        for alias, target_macro_factory in self._target_macro_factory_by_alias.items():
            parse_globals[alias] = target_macro_factory.target_macro(
                parse_context)

        return self.ParseState(parse_context, parse_globals)
Example #40
 def normalized_rules(self):
     rules = OrderedSet(rule for ruleset in self.rules.values()
                        for rule in ruleset)
     rules.update(self.roots)
     return self.NormalizedRules(rules, self.union_rules)
Example #41
0
def enum(field_name, all_values):
    """A datatype which can take on a finite set of values. This method is experimental and unstable.

  Any enum subclass can be constructed with its create() classmethod. This method will use the first
  element of `all_values` as the enum value if none is specified.

  :param field_name: A string used as the field for the datatype. Note that enum does not yet
                     support type checking as with datatype.
  :param all_values: An iterable of objects representing all possible values for the enum.
                     NB: `all_values` must be a finite, non-empty iterable with unique values!
  """

    # This call to list() will eagerly evaluate any `all_values` which would otherwise be lazy, such
    # as a generator.
    all_values_realized = list(all_values)
    # `OrderedSet` maintains the order of the input iterable, but is faster to check membership.
    allowed_values_set = OrderedSet(all_values_realized)

    if len(allowed_values_set) < len(all_values_realized):
        raise ValueError(
            "When converting all_values ({}) to a set, at least one duplicate "
            "was detected. The unique elements of all_values were: {}.".format(
                all_values_realized, allowed_values_set))

    class ChoiceDatatype(datatype([field_name])):
        allowed_values = allowed_values_set
        default_value = next(iter(allowed_values))

        @memoized_classproperty
        def _singletons(cls):
            """Generate memoized instances of this enum wrapping each of this enum's allowed values."""
            return {value: cls(value) for value in cls.allowed_values}

        @classmethod
        def _check_value(cls, value):
            if value not in cls.allowed_values:
                raise cls.make_type_error(
                    "Value {!r} for '{}' must be one of: {!r}.".format(
                        value, field_name, cls.allowed_values))

        @classmethod
        def create(cls, value=None):
            # If we get an instance of this enum class, just return it. This means you can call .create()
            # on None, an allowed value for the enum, or an existing instance of the enum.
            if isinstance(value, cls):
                return value

            # Providing an explicit value that is not None will *not* use the default value!
            if value is None:
                value = cls.default_value

            # We actually circumvent the constructor in this method due to the cls._singletons
            # memoized_classproperty, but we want to raise the same error, so we move checking into a
            # common method.
            cls._check_value(value)

            return cls._singletons[value]

        def __new__(cls, *args, **kwargs):
            this_object = super(ChoiceDatatype,
                                cls).__new__(cls, *args, **kwargs)

            field_value = getattr(this_object, field_name)

            cls._check_value(field_value)

            return this_object

    return ChoiceDatatype
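
The factory is easiest to read from a call site. A hedged usage sketch follows: the Severity enum and its values are hypothetical, and catching TypeError assumes make_type_error raises a TypeError subclass.

# Hypothetical enum built with the factory above.
Severity = enum('level', ['info', 'warn', 'error'])

assert Severity.create() == Severity('info')  # No value: defaults to the first element.
warn = Severity.create('warn')                # Allowed values map to memoized singletons.
assert Severity.create(warn) is warn          # Existing instances pass straight through.

try:
    Severity('bogus')                         # Disallowed values fail _check_value.
except TypeError:
    pass

try:
    enum('level', ['info', 'info'])           # Duplicates are rejected at definition time.
except ValueError:
    pass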
Example #42
0
  def execute(self, targets):
    """Resolves the specified confs for the configured targets and returns an iterator over
    tuples of (conf, jar path).
    """
    def dirname_for_requested_targets(targets):
      """Where we put the classpath file for this set of targets."""
      sha = hashlib.sha1()
      for t in targets:
        sha.update(t.id)
      return sha.hexdigest()

    def is_classpath(target):
      return is_jar(target) or (
        is_internal(target) and any(jar for jar in target.jar_dependencies if jar.rev)
      )

    groups = self.context.products.get_data('exclusives_groups')

    # Below, need to take the code that actually execs ivy, and invoke it once for each
    # group. Then after running ivy, we need to take the resulting classpath, and load it into
    # the build products.

    # The set of groups we need to consider is complicated:
    # - If there are no conflicting exclusives (i.e., there's only one entry in the map),
    #   then we just do the one.
    # - If there are conflicts, then there will be at least three entries in the groups map:
    #   - the group with no exclusives (X)
    #   - the two groups that are in conflict (A and B).
    # In the latter case, we need to do the resolve twice: once for A+X, and once for B+X,
    # because things in A and B can depend on things in X, and so they can indirectly depend
    # on the dependencies of X. (I think this will be covered by the computed transitive
    # dependencies of A and B, but before pushing this change, review this comment and make
    # sure that this is working correctly.)
    for group_key in groups.get_group_keys():
      # Narrow the groups target set to just the set of targets that we're supposed to build.
      # Normally, this shouldn't be different from the contents of the group.
      group_targets = groups.get_targets_for_group_key(group_key) & set(targets)

      classpath_targets = OrderedSet()
      for target in group_targets:
        classpath_targets.update(filter(is_classpath, filter(is_concrete, target.resolve())))

      target_workdir = os.path.join(self._work_dir, dirname_for_requested_targets(group_targets))
      target_classpath_file = os.path.join(target_workdir, 'classpath')
      with self.invalidated(classpath_targets, only_buildfiles=True,
                            invalidate_dependents=True) as invalidation_check:
        # Note that it's possible for all targets to be valid but for no classpath file to exist at
        # target_classpath_file, e.g., if we previously built a superset of targets.
        if invalidation_check.invalid_vts or not os.path.exists(target_classpath_file):
          self._exec_ivy(target_workdir, targets, [
            '-cachepath', target_classpath_file,
            '-confs'
          ] + self._confs)

      if not os.path.exists(target_classpath_file):
        print('Ivy failed to create classpath file at %s' % target_classpath_file)

      def safe_link(src, dest):
        if os.path.exists(dest):
          os.unlink(dest)
        os.symlink(src, dest)

      # TODO(benjy): Is this symlinking valid in the presence of multiple exclusives groups?
      # Should probably get rid of it and use a local artifact cache instead.
      # Symlink to the current classpath file.
      safe_link(target_classpath_file, self._classpath_file)

      # Symlink to the current ivy.xml file (useful for IDEs that read it).
      ivyxml_symlink = os.path.join(self._work_dir, 'ivy.xml')
      target_ivyxml = os.path.join(target_workdir, 'ivy.xml')
      safe_link(target_ivyxml, ivyxml_symlink)

      if os.path.exists(self._classpath_file):
        with self._cachepath(self._classpath_file) as classpath:
          for path in classpath:
            if self._map_jar(path):
              for conf in self._confs:
                groups.update_compatible_classpaths(group_key, [(conf, path.strip())])

    if self._report:
      self._generate_ivy_report()

    if self.context.products.isrequired("ivy_jar_products"):
      self._populate_ivy_jar_products()

    create_jardeps_for = self.context.products.isrequired(self._mapfor_typename())
    if create_jardeps_for:
      genmap = self.context.products.get(self._mapfor_typename())
      for target in filter(create_jardeps_for, targets):
        self._mapjars(genmap, target)
Example #43
0
  def _compute_missing_deps(self, srcs, actual_deps):
    """Computes deps that are used by the compiler but not specified in a BUILD file.

    These deps are bugs waiting to happen: the code may happen to compile because the dep was
    brought in some other way (e.g., by some other root target), but that is obviously fragile.

    Note that in practice we're OK with reliance on indirect deps that are only brought in
    transitively. E.g., in Scala type inference can bring in such a dep subtly. Fortunately these
    cases aren't as fragile as a completely missing dependency. It's still a good idea to have
    explicit direct deps where relevant, so we optionally warn about indirect deps, to make them
    easy to find and reason about.

    - actual_deps: a map src -> list of actual deps (source, class or jar file) as noted by the
      compiler.

    Returns a triple (missing_file_deps, missing_tgt_deps, missing_direct_tgt_deps) where:

    - missing_file_deps: a list of pairs (src_tgt, dep_file) where src_tgt requires dep_file, and
      we're unable to map to a target (because its target isn't in the total set of targets in play,
      and we don't want to parse every BUILD file in the workspace just to find it).

    - missing_tgt_deps: a list of ((src_tgt, dep_tgt), evidence) pairs where src_tgt is missing
                        a necessary transitive dependency on dep_tgt, and evidence is a list of
                        (src, dep_file) pairs that exhibit the use.

    - missing_direct_tgt_deps: the same shape, for src_tgt missing a direct dependency on
                               dep_tgt while having a transitive dep on it.

    All paths in the input and output are absolute.
    """
    def must_be_explicit_dep(dep):
      # We don't require explicit deps on the java runtime, so we shouldn't consider that
      # a missing dep.
      return not dep.startswith(self._context.java_home)

    def target_or_java_dep_in_targets(target, targets):
      # We want to check if the target is in the targets collection
      #
      # However, for the special case of scala_library that has a java_sources
      # reference we're ok if that exists in targets even if the scala_library does not.

      if target in targets:
        return True
      elif target.is_scala:
        return any(t in targets for t in target.java_sources)
      else:
        return False

    # TODO: If recomputing these every time becomes a performance issue, memoize for
    # already-seen targets and incrementally compute for new targets not seen in a previous
    # partition, in this or a previous chunk.
    targets_by_file = self._compute_targets_by_file()
    transitive_deps_by_target = self._compute_transitive_deps_by_target()

    # Find deps that are actual but not specified.
    with self._context.new_workunit(name='scan_deps'):
      missing_file_deps = OrderedSet()  # (src_tgt, dep_file) pairs.
      missing_tgt_deps_map = defaultdict(list)  # (src_tgt, dep_tgt) -> a list of (src, dep_file) evidence.
      missing_direct_tgt_deps_map = defaultdict(list)  # The same, but for direct deps.

      buildroot = get_buildroot()
      abs_srcs = [os.path.join(buildroot, src) for src in srcs]
      for src in abs_srcs:
        # Guard against unknown sources: .get() may return None, and next() needs a default.
        src_tgt = next(iter(targets_by_file.get(src, [])), None)
        if src_tgt is not None:
          for actual_dep in filter(must_be_explicit_dep, actual_deps.get(src, [])):
            actual_dep_tgts = targets_by_file.get(actual_dep)
            # actual_dep_tgts is usually a singleton. If it's not, we only need one of these
            # to be in our declared deps to be OK.
            if actual_dep_tgts is None:
              missing_file_deps.add((src_tgt, actual_dep))
            elif not target_or_java_dep_in_targets(src_tgt, actual_dep_tgts):
              # Obviously intra-target deps are fine.
              canonical_actual_dep_tgt = next(iter(actual_dep_tgts))
              if actual_dep_tgts.isdisjoint(transitive_deps_by_target.get(src_tgt, [])):
                missing_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append((src, actual_dep))
              elif canonical_actual_dep_tgt not in src_tgt.dependencies:
                # The canonical dep is the only one a direct dependency makes sense on.
                missing_direct_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append(
                    (src, actual_dep))
        else:
          raise TaskError('Requested dep info for unknown source file: %s' % src)

    return (list(missing_file_deps),
            missing_tgt_deps_map.items(),
            missing_direct_tgt_deps_map.items())
Example #44
0
    def test_resolve_conflicted(self):
        # Create jar_libraries with different versions of the same dep: this will cause
        # a pre-ivy "eviction" in IvyUtils.generate_ivy, but the same case can be triggered
        # due to an ivy eviction where the declared version loses to a transitive version.
        losing_dep = JarDependency('com.google.guava', 'guava', '16.0')
        winning_dep = JarDependency('com.google.guava', 'guava', '16.0.1')
        losing_lib = self.make_target('//:a', JarLibrary, jars=[losing_dep])
        winning_lib = self.make_target('//:b', JarLibrary, jars=[winning_dep])
        # The assertions at the end confirm that the same artifacts were added to each target.
        context = self.context(target_roots=[losing_lib, winning_lib])

        def artifact_path(name):
            return os.path.join(self.pants_workdir, 'ivy_artifact', name)

        symlink_map = {
            artifact_path('bogus0'): artifact_path('bogus0'),
            artifact_path('bogus1'): artifact_path('bogus1'),
            artifact_path('unused'): artifact_path('unused')
        }
        task = self.create_task(context, 'unused')

        def mock_ivy_resolve(targets, *args, **kw):
            if targets:
                cache_manager = task.create_cache_manager(False)
                vts = VersionedTargetSet(cache_manager,
                                         cache_manager.wrap_targets(targets))
                cache_key = vts.cache_key.hash
            else:
                cache_key = None
            return [], symlink_map, cache_key

        task.ivy_resolve = mock_ivy_resolve

        def mock_parse_report(resolve_hash_name_ignored, conf):
            ivy_info = IvyInfo(conf)

            # Guava 16.0 would be evicted by Guava 16.0.1.  But in a real
            # resolve, it's possible that before it was evicted, it would
            # generate some resolution data.

            artifact_1 = artifact_path('bogus0')
            unused_artifact = artifact_path('unused')

            # Because guava 16.0 was evicted, it has no artifacts
            guava_0 = IvyModule(
                IvyModuleRef('com.google.guava', 'guava', '16.0'), None, [])
            guava_1 = IvyModule(
                IvyModuleRef('com.google.guava', 'guava', '16.0.1'),
                artifact_1, [])
            ivy_info.add_module(guava_0)
            ivy_info.add_module(guava_1)

            artifact_dep_1 = artifact_path('bogus1')

            # Because fake#dep 16.0 was evicted before it was resolved,
            # its deps are never examined, so we don't call add_module.
            guava_dep_0 = IvyModule(
                IvyModuleRef('com.google.fake', 'dep', '16.0.0'), None,
                [guava_0.ref])
            guava_dep_1 = IvyModule(
                IvyModuleRef('com.google.fake', 'dep', '16.0.1'),
                artifact_dep_1, [guava_1.ref])

            ivy_info.add_module(guava_dep_1)

            # Add an unrelated module to ensure that it's not returned
            unrelated_parent = IvyModuleRef('com.google.other', 'parent',
                                            '1.0')
            unrelated = IvyModule(
                IvyModuleRef('com.google.unrelated', 'unrelated', '1.0'),
                unused_artifact, [unrelated_parent])
            ivy_info.add_module(unrelated)

            return ivy_info

        task._parse_report = mock_parse_report
        task.execute()
        compile_classpath = context.products.get_data('compile_classpath',
                                                      None)
        losing_cp = compile_classpath.get_for_target(losing_lib)
        winning_cp = compile_classpath.get_for_target(winning_lib)
        self.assertEquals(losing_cp, winning_cp)
        self.assertEquals(
            OrderedSet([(u'default', artifact_path(u'bogus0')),
                        (u'default', artifact_path(u'bogus1'))]), winning_cp)
Example #45
0
 def javadeps(self):
     return OrderedSet()
Example #46
0
 def collector(dep):
     return OrderedSet([dep])
Example #47
0
class Context(object):
    """Contains the context for a single run of pants.

  Goal implementations can access configuration data from pants.ini and any flags they have exposed
  here, as well as information about the targets involved in the run.

  Advanced uses of the context include adding new targets to it for upstream or downstream goals to
  operate on and mapping of products a goal creates to the targets the products are associated with.
  """
    class Log(object):
        """A logger facade that logs into the pants reporting framework."""
        def __init__(self, run_tracker):
            self._run_tracker = run_tracker

        def debug(self, *msg_elements):
            self._run_tracker.log(Report.DEBUG, *msg_elements)

        def info(self, *msg_elements):
            self._run_tracker.log(Report.INFO, *msg_elements)

        def warn(self, *msg_elements):
            self._run_tracker.log(Report.WARN, *msg_elements)

        def error(self, *msg_elements):
            self._run_tracker.log(Report.ERROR, *msg_elements)

        def fatal(self, *msg_elements):
            self._run_tracker.log(Report.FATAL, *msg_elements)

    def __init__(self,
                 config,
                 options,
                 run_tracker,
                 target_roots,
                 requested_goals=None,
                 lock=Lock.unlocked(),
                 log=None,
                 target_base=None):
        self._config = config
        self._options = options
        self.run_tracker = run_tracker
        self._lock = lock
        self._log = log or Context.Log(run_tracker)
        self._target_base = target_base or Target
        self._state = {}
        self._products = Products()
        self._buildroot = get_buildroot()
        self.requested_goals = requested_goals or []

        self.replace_targets(target_roots)

    @property
    def config(self):
        """Returns a Config object containing the configuration data found in pants.ini."""
        return self._config

    @property
    def options(self):
        """Returns the command line options parsed at startup."""
        return self._options

    @property
    def lock(self):
        """Returns the global pants run lock so a goal can release it if needed."""
        return self._lock

    @property
    def log(self):
        """Returns the preferred logger for goals to use."""
        return self._log

    @property
    def products(self):
        """Returns the Products manager for the current run."""
        return self._products

    @property
    def target_roots(self):
        """Returns the targets specified on the command line.

    This set is strictly a subset of all targets in play for the run as returned by self.targets().
    Note that for a command line invocation that uses wildcard selectors : or ::, the targets
    globbed by the wildcards are considered to be target roots.
    """
        return self._target_roots

    def __str__(self):
        return 'Context(id:%s, state:%s, targets:%s)' % (self.id, self.state,
                                                         self.targets())

    @contextmanager
    def new_workunit(self, name, labels=list(), cmd=''):
        with self.run_tracker.new_workunit(name=name, labels=labels,
                                           cmd=cmd) as workunit:
            yield workunit

    def acquire_lock(self):
        """ Acquire the global lock for the root directory associated with this context. When
    a goal requires serialization, it will call this to acquire the lock.
    """
        def onwait(pid):
            print('Waiting on pants process %s to complete' %
                  _process_info(pid),
                  file=sys.stderr)
            return True

        if self._lock.is_unlocked():
            runfile = os.path.join(self._buildroot, '.pants.run')
            self._lock = Lock.acquire(runfile, onwait=onwait)

    def release_lock(self):
        """Release the global lock if it's held.
    Returns True if the lock was held before this call.
    """
        if self._lock.is_unlocked():
            return False
        else:
            self._lock.release()
            self._lock = Lock.unlocked()
            return True

    def is_unlocked(self):
        """Whether the global lock object is actively holding the lock."""
        return self._lock.is_unlocked()

    def replace_targets(self, target_roots):
        """Replaces all targets in the context with the given roots and their transitive
    dependencies.
    """
        self._target_roots = list(target_roots)

        self._targets = OrderedSet()
        for target in self._target_roots:
            self.add_target(target)
        self.id = Target.identify(self._targets)

    def add_target(self, target):
        """Adds a target and its transitive dependencies to the run context.

    The target is not added to the target roots.
    """
        def add_targets(tgt):
            self._targets.update(t for t in tgt.resolve()
                                 if isinstance(t, self._target_base))

        target.walk(add_targets)

    def add_new_target(self, target_base, target_type, *args, **kwargs):
        """Creates a new target, adds it to the context and returns it.

    This method ensures the target resolves files against the given target_base, creating the
    directory if needed and registering a source root.
    """
        if 'derived_from' in kwargs:
            derived_from = kwargs.get('derived_from')
            del kwargs['derived_from']
        else:
            derived_from = None
        target = self._create_new_target(target_base, target_type, *args,
                                         **kwargs)
        self.add_target(target)
        if derived_from:
            target.derived_from = derived_from
        return target

    def _create_new_target(self, target_base, target_type, *args, **kwargs):
        if not os.path.exists(target_base):
            os.makedirs(target_base)
        SourceRoot.register(target_base, target_type)
        with ParseContext.temp(target_base):
            return target_type(*args, **kwargs)

    def remove_target(self, target):
        """Removes the given Target object from the context completely if present."""
        if target in self.target_roots:
            self.target_roots.remove(target)
        self._targets.discard(target)

    def targets(self, predicate=None):
        """Selects targets in-play in this run from the target roots and their transitive dependencies.

    If specified, the predicate will be used to narrow the scope of targets returned.
    """
        return filter(predicate, self._targets)

    def dependents(self, on_predicate=None, from_predicate=None):
        """Returns  a map from targets that satisfy the from_predicate to targets they depend on that
      satisfy the on_predicate.
    """
        core = set(self.targets(on_predicate))
        dependees = defaultdict(set)
        for target in self.targets(from_predicate):
            if hasattr(target, 'dependencies'):
                for dependency in target.dependencies:
                    if dependency in core:
                        dependees[target].add(dependency)
        return dependees

    def resolve(self, spec):
        """Returns an iterator over the target(s) the given address points to."""
        with ParseContext.temp():
            return Pants(spec).resolve()

    @contextmanager
    def state(self, key, default=None):
        value = self._state.get(key, default)
        yield value
        self._state[key] = value

    @contextmanager
    def timing(self, label):
        if self.timer:
            with self.timer.timing(label):
                yield
        else:
            yield
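
One subtlety in the state context manager above: it stores back the originally yielded object, so goals must mutate that object in place; rebinding the name inside the with block is lost. A hedged usage sketch (the key and target are hypothetical):

with context.state('seen-targets', default=set()) as seen:
    seen.add(some_target)   # Persisted: the yielded set itself is stored back on exit.
    # seen = {some_target}  # Would be lost: rebinding never reaches context._state.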
Example #48
0
    def setup_parser(self, parser, args):
        if not args:
            args.append('help')

        logger = logging.getLogger(__name__)

        goals = self.new_options.goals
        specs = self.new_options.target_specs
        fail_fast = self.new_options.for_global_scope().fail_fast

        for goal in goals:
            if BuildFile.from_cache(get_buildroot(), goal,
                                    must_exist=False).exists():
                logger.warning(
                    " Command-line argument '{0}' is ambiguous and was assumed to be "
                    "a goal. If this is incorrect, disambiguate it with ./{0}."
                    .format(goal))

        if self.new_options.is_help:
            self.new_options.print_help(goals=goals)
            sys.exit(0)

        self.requested_goals = goals

        with self.run_tracker.new_workunit(name='setup',
                                           labels=[WorkUnit.SETUP]):
            spec_parser = CmdLineSpecParser(
                self.root_dir,
                self.address_mapper,
                spec_excludes=self.get_spec_excludes())
            with self.run_tracker.new_workunit(name='parse',
                                               labels=[WorkUnit.SETUP]):
                for spec in specs:
                    for address in spec_parser.parse_addresses(
                            spec, fail_fast):
                        self.build_graph.inject_address_closure(address)
                        self.targets.append(
                            self.build_graph.get_target(address))
        self.goals = [Goal.by_name(goal) for goal in goals]

        rcfiles = self.config.getdefault(
            'rcfiles', type=list, default=['/etc/pantsrc', '~/.pants.rc'])
        if rcfiles:
            rcfile = RcFile(rcfiles,
                            default_prepend=False,
                            process_default=True)

            # Break down the goals specified on the command line to the full set that will be run so we
            # can apply default flags to inner goal nodes.  Also break down goals by Task subclass and
            # register the task class hierarchy fully qualified names so we can apply defaults to
            # baseclasses.

            sections = OrderedSet()
            for goal in Engine.execution_order(self.goals):
                for task_name in goal.ordered_task_names():
                    sections.add(task_name)
                    task_type = goal.task_type_by_name(task_name)
                    for clazz in task_type.mro():
                        if clazz == Task:
                            break
                        sections.add('%s.%s' %
                                     (clazz.__module__, clazz.__name__))

            augmented_args = rcfile.apply_defaults(sections, args)
            if augmented_args != args:
                # TODO(John Sirois): Cleanup this currently important mutation of the passed in args
                # once the 2-layer of command -> goal is squashed into one.
                args[:] = augmented_args
                sys.stderr.write("(using pantsrc expansion: pants goal %s)\n" %
                                 ' '.join(augmented_args))
Example #49
0
 def create_source_template(base_id, includes=None, excludes=None):
     return TemplateData(
         base=base_id,
         includes='|'.join(OrderedSet(includes)) if includes else None,
         excludes='|'.join(OrderedSet(excludes)) if excludes else None,
     )
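
A quick illustration of the dedupe-then-join behavior above (values hypothetical, and attribute access on the TemplateData result is an assumption):

t = create_source_template('src/java',
                           includes=['**/*.java', '**/*.java', '*.properties'])
# OrderedSet drops the repeated glob before the '|' join:
#   t.base     == 'src/java'
#   t.includes == '**/*.java|*.properties'
#   t.excludes is None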
Example #50
0
    def test_pants_contrib_case(self):
        def create_requirement_lib(name):
            return self.create_python_requirement_library(
                relpath=name,
                name=name,
                requirements=['{}==1.1.1'.format(name)])

        req1 = create_requirement_lib('req1')
        create_requirement_lib('req2')
        req3 = create_requirement_lib('req3')

        self.create_python_library(relpath='src/python/pants/base',
                                   name='base',
                                   dependencies=[
                                       'req1',
                                       'req2',
                                   ])
        self.create_python_binary(
            relpath='src/python/pants/bin',
            name='bin',
            entry_point='pants.bin.pants_exe:main',
            dependencies=[
                # Should be stripped in reduced_dependencies since pants_packaged provides these sources.
                'src/python/pants/base',
            ])
        pants_packaged = self.create_python_library(relpath='src/python/pants',
                                                    name='pants_packaged',
                                                    provides=dedent("""
      setup_py(
        name='pants_packaged',
        version='0.0.0'
      ).with_binaries(
        # Should be stripped in reduced_dependencies since pants_packaged provides this.
        pants_bin='src/python/pants/bin'
      )
      """))
        contrib_lib = self.create_python_library(
            relpath='contrib/lib/src/python/pants/contrib/lib',
            name='lib',
            dependencies=[
                'req3',
                # Should be stripped in reduced_dependencies since pants_packaged provides these sources.
                'src/python/pants/base',
            ])
        contrib_plugin = self.create_python_library(
            relpath='contrib/lib/src/python/pants/contrib',
            name='plugin',
            provides=dedent("""
      setup_py(
        name='contrib',
        version='0.0.0'
      )
      """),
            dependencies=[
                'contrib/lib/src/python/pants/contrib/lib',
                'src/python/pants:pants_packaged', 'req1'
            ])
        reduced_dependencies = self.dependency_calculator.reduced_dependencies(
            contrib_plugin)
        self.assertEqual(reduced_dependencies,
                         OrderedSet([contrib_lib, req3, pants_packaged, req1]))
Example #51
0
  def split_args(self, args: Optional[List[str]] = None) -> SplitArgs:
    """Split the specified arg list (or sys.argv if unspecified).

    args[0] is ignored.

    Returns a SplitArgs tuple.
    """
    goals = OrderedSet()
    scope_to_flags: Dict[str, List[str]] = {}

    def add_scope(s: str) -> None:
      # Force the scope to appear, even if empty.
      if s not in scope_to_flags:
        scope_to_flags[s] = []

    positional_args = []
    passthru = []
    passthru_owner = None

    self._unconsumed_args = list(reversed(sys.argv if args is None else args))
    # In regular use the first token is the binary name, so skip it. However, tests may
    # pass just a list of flags, so don't skip it in that case.
    if not self._at_flag() and self._unconsumed_args:
      self._unconsumed_args.pop()

    def assign_flag_to_scope(flg: str, default_scope: str) -> None:
      flag_scope, descoped_flag = self._descope_flag(flg, default_scope=default_scope)
      if flag_scope not in scope_to_flags:
        scope_to_flags[flag_scope] = []
      scope_to_flags[flag_scope].append(descoped_flag)

    global_flags = self._consume_flags()

    add_scope(GLOBAL_SCOPE)
    for flag in global_flags:
      assign_flag_to_scope(flag, GLOBAL_SCOPE)
    scope, flags = self._consume_scope()
    while scope:
      if not self._check_for_help_request(scope.lower()):
        add_scope(scope)
        goals.add(scope.partition('.')[0])
        passthru_owner = scope
        for flag in flags:
          assign_flag_to_scope(flag, scope)
      scope, flags = self._consume_scope()

    while self._unconsumed_args and not self._at_double_dash():
      arg = self._unconsumed_args.pop()
      if arg.startswith('-'):
        # We assume any args here are in global scope.
        if not self._check_for_help_request(arg):
          assign_flag_to_scope(arg, GLOBAL_SCOPE)
      elif self.is_positional_arg(arg):
        positional_args.append(arg)
      elif arg not in self._known_scopes:
        self._unknown_scopes.append(arg)

    if self._at_double_dash():
      self._unconsumed_args.pop()
      passthru = list(reversed(self._unconsumed_args))

    if self._unknown_scopes:
      self._help_request = UnknownGoalHelp(self._unknown_scopes)

    if not goals and not self._help_request:
      self._help_request = NoGoalHelp()

    return SplitArgs(list(goals), scope_to_flags, positional_args, passthru,
                     passthru_owner if passthru else None, self._unknown_scopes)
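
As a worked illustration of the splitting rules above, here is a hedged sketch (not original source): the splitter construction is elided, the goal, flags, and spec are hypothetical, and it assumes GLOBAL_SCOPE is the empty string with SplitArgs exposing fields in the order of the return statement.

argv = ['./pants', '--level=debug', 'test.junit', '--fail-fast', 'src/java::', '--', '-Xmx1g']
split = splitter.split_args(argv)

assert split.goals == ['test']                  # 'test.junit' contributes its goal prefix.
assert split.scope_to_flags == {
    '': ['--level=debug'],                      # The global scope is always present.
    'test.junit': ['--fail-fast'],
}
assert split.positional_args == ['src/java::']
assert split.passthru == ['-Xmx1g']             # Everything after '--'.
assert split.passthru_owner == 'test.junit'     # The goal scope seen most recently.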
Example #52
0
 def create_javadeps():
   gen_info = context.config.getlist('thriftstore-dml-gen', 'javadeps')
   deps = OrderedSet()
   for dep in gen_info:
     deps.update(context.resolve(dep))
   return deps
Example #53
0
 def testMustExistFalse(self):
   buildfile = BuildFile(BuildFileTest.root_dir, "path-that-does-not-exist/BUILD", must_exist=False)
   self.assertEquals(OrderedSet([buildfile]), buildfile.family())
Example #54
0
 def testAncestors(self):
   self.assertEquals(OrderedSet([
       BuildFileTest.buildfile('BUILD'),
       BuildFileTest.buildfile('BUILD.twitter'),
   ]), self.buildfile.ancestors())
Example #55
0
 def testDescendants(self):
   self.assertEquals(OrderedSet([
       BuildFileTest.buildfile('grandparent/parent/child1/BUILD'),
       BuildFileTest.buildfile('grandparent/parent/child1/BUILD.twitter'),
       BuildFileTest.buildfile('grandparent/parent/child2/child3/BUILD'),
   ]), self.buildfile.descendants())
Example #56
0
 def create_collection(dep):
     return OrderedSet([dep])
Example #57
0
 def __init__(self, root):
   """
   :API: public
   """
   self._root = root
   self._rel_paths = OrderedSet()
Example #58
0
        def process_target(current_target):
            """
      :type current_target: pants.build_graph.target.Target
      """
            def get_target_type(tgt):
                def is_test(t):
                    return isinstance(t, JUnitTests) or isinstance(
                        t, PythonTests)

                if is_test(tgt):
                    return ExportTask.SourceRootTypes.TEST
                else:
                    if (isinstance(tgt, Resources)
                            and tgt in resource_target_map
                            and is_test(resource_target_map[tgt])):
                        return ExportTask.SourceRootTypes.TEST_RESOURCE
                    elif isinstance(tgt, Resources):
                        return ExportTask.SourceRootTypes.RESOURCE
                    else:
                        return ExportTask.SourceRootTypes.SOURCE

            info = {
                'targets': [],
                'libraries': [],
                'roots': [],
                'id': current_target.id,
                'target_type': get_target_type(current_target),
                # NB: is_code_gen should be removed when export format advances to 1.1.0 or higher
                'is_code_gen': current_target.is_synthetic,
                'is_synthetic': current_target.is_synthetic,
                'pants_target_type': self._get_pants_target_alias(type(current_target)),
            }

            if not current_target.is_synthetic:
                info['globs'] = current_target.globs_relative_to_buildroot()
                if self.get_options().sources:
                    info['sources'] = list(
                        current_target.sources_relative_to_buildroot())

            info['transitive'] = current_target.transitive
            info['scope'] = str(current_target.scope)
            info['is_target_root'] = current_target in target_roots_set

            if isinstance(current_target, PythonRequirementLibrary):
                reqs = current_target.payload.get_field_value(
                    'requirements', set())
                """:type : set[pants.backend.python.python_requirement.PythonRequirement]"""
                info['requirements'] = [req.key for req in reqs]

            if isinstance(current_target, PythonTarget):
                interpreter_for_target = self._interpreter_cache.select_interpreter_for_targets(
                    [current_target])
                if interpreter_for_target is None:
                    raise TaskError(
                        'Unable to find suitable interpreter for {}'.format(
                            current_target.address))
                python_interpreter_targets_mapping[
                    interpreter_for_target].append(current_target)
                info['python_interpreter'] = str(
                    interpreter_for_target.identity)

            def iter_transitive_jars(jar_lib):
                """
        :type jar_lib: :class:`pants.backend.jvm.targets.jar_library.JarLibrary`
        :rtype: :class:`collections.Iterator` of
                :class:`pants.java.jar.M2Coordinate`
        """
                if classpath_products:
                    jar_products = classpath_products.get_artifact_classpath_entries_for_targets(
                        (jar_lib, ))
                    for _, jar_entry in jar_products:
                        coordinate = jar_entry.coordinate
                        # We drop classifier and type_ since those fields are represented in the global
                        # libraries dict and here we just want the key into that dict (see `_jar_id`).
                        yield M2Coordinate(org=coordinate.org,
                                           name=coordinate.name,
                                           rev=coordinate.rev)

            target_libraries = OrderedSet()
            if isinstance(current_target, JarLibrary):
                target_libraries = OrderedSet(
                    iter_transitive_jars(current_target))
            for dep in current_target.dependencies:
                info['targets'].append(dep.address.spec)
                if isinstance(dep, JarLibrary):
                    for jar in dep.jar_dependencies:
                        target_libraries.add(
                            M2Coordinate(jar.org, jar.name, jar.rev))
                    # Add all the jars pulled in by this jar_library
                    target_libraries.update(iter_transitive_jars(dep))
                if isinstance(dep, Resources):
                    resource_target_map[dep] = current_target

            if isinstance(current_target, ScalaLibrary):
                for dep in current_target.java_sources:
                    info['targets'].append(dep.address.spec)
                    process_target(dep)

            if isinstance(current_target, JvmTarget):
                info['excludes'] = [
                    self._exclude_id(exclude)
                    for exclude in current_target.excludes
                ]
                info['platform'] = current_target.platform.name
                if hasattr(current_target, 'test_platform'):
                    info['test_platform'] = current_target.test_platform.name

            info['roots'] = [{
                'source_root': source_root,
                'package_prefix': package_prefix
            } for source_root, package_prefix in self._source_roots_for_target(current_target)]

            if classpath_products:
                info['libraries'] = [
                    self._jar_id(lib) for lib in target_libraries
                ]
            targets_map[current_target.address.spec] = info
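
For orientation, each entry assembled by process_target ends up shaped roughly as below; the spec and values are hypothetical, and the keys shown are only those set above.

# targets_map['src/java/foo:foo'] == {
#     'targets': ['src/java/bar:bar', ...],     # direct dependency specs
#     'libraries': [...],                       # jar ids, when classpath_products is set
#     'roots': [{'source_root': ..., 'package_prefix': ...}, ...],
#     'id': ..., 'target_type': ..., 'is_code_gen': ..., 'is_synthetic': ...,
#     'pants_target_type': ..., 'transitive': ..., 'scope': ..., 'is_target_root': ...,
#     # plus 'globs'/'sources', JVM, and Python extras when applicable
# }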
Example #59
0
 def resolve_deps(key):
   deps = OrderedSet()
   for dep in context.config.getlist('protobuf-gen', key):
     deps.update(context.resolve(dep))
   return deps
Example #60
0
 def scan_build_files(self, base_path):
     build_files = BuildFile.scan_build_files(
         self._project_tree,
         base_path,
         build_ignore_patterns=self._build_ignore_patterns)
     return OrderedSet(bf.relpath for bf in build_files)