Example #1
    def execute(self):
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy()
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            if (PythonSetup.global_instance().interpreter_search_paths
                    and PythonInterpreterCache.pex_python_paths()):
                self.context.log.warn(
                    "Detected both PEX_PYTHON_PATH and "
                    "--python-setup-interpreter-search-paths. Ignoring "
                    "--python-setup-interpreter-search-paths.")
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = self._interpreter_path_file(target_set_id)
            if not os.path.exists(interpreter_path_file):
                self._create_interpreter_path_file(interpreter_path_file,
                                                   python_tgts)

        interpreter = self._get_interpreter(interpreter_path_file)
        self.context.products.register_data(PythonInterpreter, interpreter)
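Example #1 calls helper methods that this excerpt does not define. A minimal sketch of what `_interpreter_path_file` likely looks like, assuming the same on-disk layout that Example #2 below spells out inline (one `interpreter.path` file per fingerprinted target set):

    # Sketch of an assumed helper on the task class (requires `import os`):
    def _interpreter_path_file(self, target_set_id):
        # Mirrors the inline layout in Example #2:
        # <workdir>/<target_set_id>/interpreter.path
        return os.path.join(self.workdir, target_set_id, 'interpreter.path')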
Example #2
    def execute(self):
        interpreter = None
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy(task=self)
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = os.path.join(self.workdir, target_set_id,
                                                 'interpreter.path')
            if not os.path.exists(interpreter_path_file):
                interpreter_cache = PythonInterpreterCache(
                    PythonSetup.global_instance(),
                    PythonRepos.global_instance(),
                    logger=self.context.log.debug)

                # We filter the interpreter cache itself (and not just the interpreters we pull from it)
                # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
                # an escape hatch.
                filters = self.get_options().constraints or [b'']

                # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
                self.context.acquire_lock()
                try:
                    interpreter_cache.setup(filters=filters)
                finally:
                    self.context.release_lock()

                interpreter = interpreter_cache.select_interpreter_for_targets(
                    python_tgts)
                safe_mkdir_for(interpreter_path_file)
                with open(interpreter_path_file, 'w') as outfile:
                    outfile.write(b'{}\t{}\n'.format(
                        interpreter.binary, str(interpreter.identity)))
                    for dist, location in interpreter.extras.items():
                        dist_name, dist_version = dist
                        outfile.write(b'{}\t{}\t{}\n'.format(
                            dist_name, dist_version, location))

        if not interpreter:
            with open(interpreter_path_file, 'r') as infile:
                lines = infile.readlines()
                binary, identity = lines[0].strip().split('\t')
                extras = {}
                for line in lines[1:]:
                    dist_name, dist_version, location = line.strip().split(
                        '\t')
                    extras[(dist_name, dist_version)] = location

            interpreter = PythonInterpreter(binary,
                                            PythonIdentity.from_path(identity),
                                            extras)

        self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Example #3
  def execute(self):
    targets = self.context.targets(predicate=has_python_sources)
    with self.invalidated(targets) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of gathering
      # an empty set of sources, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.join(self.workdir, target_set_id)

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        path_tmp = path + '.tmp'
        shutil.rmtree(path_tmp, ignore_errors=True)
        self._build_pex(interpreter, path_tmp, invalidation_check.all_vts)
        shutil.move(path_tmp, path)

    pex = PEX(os.path.realpath(path), interpreter=interpreter)
    self.context.products.get_data(self.PYTHON_SOURCES, lambda: pex)
Example #4
  def execute(self):
    # NB: Downstream product consumers may need the selected interpreter for use with
    # any type of importable Python target, including `PythonRequirementLibrary` targets
    # (for use with the `repl` goal, for instance). For interpreter selection,
    # we only care about targets with compatibility constraints.
    python_tgts_and_reqs = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonTarget, PythonRequirementLibrary))
    )
    if not python_tgts_and_reqs:
      return
    python_tgts = [tgt for tgt in python_tgts_and_reqs if isinstance(tgt, PythonTarget)]
    fs = PythonInterpreterFingerprintStrategy(python_setup=self._interpreter_cache.python_setup)
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # If there are no constraints, meaning no global constraints nor compatibility requirements on
      # the targets, we still go through the motions of selecting an interpreter, to prevent
      # downstream tasks from having to check for this special case.
      target_set_id = (
        'no_constraints'
        if not invalidation_check.all_vts else
        VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts).cache_key.hash
      )
      interpreter_path_file = self._interpreter_path_file(target_set_id)
      interpreter = self._get_interpreter(interpreter_path_file, python_tgts)

    self.context.products.register_data(PythonInterpreter, interpreter)
Example #5
  def resolve_requirements(self, req_libs):
    """Requirements resolution for PEX files.

    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
          dump_requirement_libs(builder, interpreter, req_libs, self.context.log)
          builder.freeze()
    return PEX(path, interpreter=interpreter)
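Several of these examples delegate atomic directory creation to `safe_concurrent_creation`. A rough sketch of such a context manager, assuming semantics equivalent to the explicit `path + '.tmp'` / `shutil.move` dance in Examples #3 and #8 (the real pants.util implementation may differ):

import contextlib
import os
import shutil
import uuid

@contextlib.contextmanager
def safe_concurrent_creation(target_path):
    # Build into a uniquely named sibling so a concurrent pants process never
    # observes a half-written directory, then rename into place on success.
    tmp_path = '{}.tmp.{}'.format(target_path, uuid.uuid4().hex)
    try:
        yield tmp_path
        if os.path.exists(tmp_path) and not os.path.exists(target_path):
            os.rename(tmp_path, target_path)
    finally:
        shutil.rmtree(tmp_path, ignore_errors=True)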
Example #6
    def execute(self):
        # NB: Downstream product consumers may need the selected interpreter for use with
        # any type of importable Python target, including `PythonRequirementLibrary` targets
        # (for use with the `repl` goal, for instance). For interpreter selection,
        # we only care about targets with compatibility constraints.
        python_tgts_and_reqs = self.context.targets(lambda tgt: isinstance(
            tgt, (PythonTarget, PythonRequirementLibrary)))
        if not python_tgts_and_reqs:
            return
        python_tgts = [
            tgt for tgt in python_tgts_and_reqs
            if isinstance(tgt, PythonTarget)
        ]
        fs = PythonInterpreterFingerprintStrategy(
            python_setup=self._interpreter_cache.python_setup)
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no constraints, meaning no global constraints nor compatibility requirements on
            # the targets, we still go through the motions of selecting an interpreter, to prevent
            # downstream tasks from having to check for this special case.
            target_set_id = ('no_constraints' if not invalidation_check.all_vts
                             else VersionedTargetSet.from_versioned_targets(
                                 invalidation_check.all_vts).cache_key.hash)
            interpreter_path_file = self._interpreter_path_file(target_set_id)
            interpreter = self._get_interpreter(interpreter_path_file,
                                                python_tgts)

        self.context.products.register_data(PythonInterpreter, interpreter)
Example #7
 def test_for_illegal_vts(self):
   # update() performs this check via vts.ensure_legal; we also test it here since those checks are on different branches.
   vt = self.make_vt()
   self.clobber_symlink(vt)
   vts = VersionedTargetSet.from_versioned_targets([vt])
   with self.assertRaises(VersionedTargetSet.IllegalResultsDir):
     vts.update()
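The test relies on `VersionedTargetSet.ensure_legal` (invoked inside `update()`). A hypothetical illustration of the kind of check it performs; the `results_dir` attribute and the symlink convention are assumptions, not taken from this excerpt:

import os

class IllegalResultsDir(Exception):
    """Raised when a results directory is not in the expected state."""

def ensure_legal(versioned_targets):
    # Hypothetical: each versioned target's results dir is expected to be a
    # symlink into the cache; clobber_symlink in the test replaces it with a
    # plain directory, which a check like this would reject.
    for vt in versioned_targets:
        if not os.path.islink(vt.results_dir):
            raise IllegalResultsDir('Results dir was clobbered: {}'.format(vt.results_dir))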
Example #8
    def execute(self):
        targets = self.context.targets(
            lambda tgt: isinstance(tgt, (PythonTarget, Resources)))
        with self.invalidated(targets) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of gathering
            # an empty set of sources, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            path = os.path.join(self.workdir, target_set_id)
            path_tmp = path + '.tmp'

            shutil.rmtree(path_tmp, ignore_errors=True)

            interpreter = self.context.products.get_data(PythonInterpreter)
            if not os.path.isdir(path):
                self._build_pex(interpreter, path_tmp,
                                invalidation_check.all_vts)
                shutil.move(path_tmp, path)

        pex = PEX(os.path.realpath(path), interpreter=interpreter)
        self.context.products.get_data(self.PYTHON_SOURCES, lambda: pex)
Example #9
  def execute(self):
    python_tgts_and_reqs = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonTarget, PythonRequirementLibrary))
    )
    if not python_tgts_and_reqs:
      return
    python_tgts = [tgt for tgt in python_tgts_and_reqs if isinstance(tgt, PythonTarget)]
    fs = PythonInterpreterFingerprintStrategy()
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      if (PythonSetup.global_instance().interpreter_search_paths
          and PythonInterpreterCache.pex_python_paths()):
        self.context.log.warn("Detected both PEX_PYTHON_PATH and "
                              "--python-setup-interpreter-search-paths. Ignoring "
                              "--python-setup-interpreter-search-paths.")
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      interpreter_path_file = self._interpreter_path_file(target_set_id)
      if not os.path.exists(interpreter_path_file):
        self._create_interpreter_path_file(interpreter_path_file, python_tgts)

    interpreter = self._get_interpreter(interpreter_path_file)
    self.context.products.register_data(PythonInterpreter, interpreter)
Example #10
    def execute(self):
        req_libs = self.context.targets(
            lambda tgt: isinstance(tgt, PythonRequirementLibrary))
        fs = PythonRequirementFingerprintStrategy(task=self)
        with self.invalidated(req_libs,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            interpreter = self.context.products.get_data(PythonInterpreter)
            path = os.path.join(self.workdir, str(interpreter.identity),
                                target_set_id)
            path_tmp = path + '.tmp'

            shutil.rmtree(path_tmp, ignore_errors=True)

            if not os.path.isdir(path):
                self._build_pex(interpreter, path_tmp, req_libs)
                shutil.move(path_tmp, path)

        pex = PEX(os.path.realpath(path), interpreter=interpreter)
        self.context.products.get_data(self.REQUIREMENTS_PEX, lambda: pex)
Example #11
  def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.

    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      # We need to ensure that we are resolving for only the current platform if we are
      # including local python dist targets that have native extensions.
      tgts = self.context.targets()
      maybe_platforms = ['current'] if build_for_current_platform_only_check(tgts) else None

      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
          dump_requirement_libs(builder, interpreter, req_libs, self.context.log, platforms=maybe_platforms)
          builder.freeze()
    return PEX(path, interpreter=interpreter)
Example #12
  def execute(self):
    products = self.context.products
    targets = self.context.targets(lambda t: isinstance(t, JarLibrary))

    with self.invalidated(targets, invalidate_dependents=False) as invalidation_check:
      global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
      vts_workdir = os.path.join(self._workdir, global_vts.cache_key.hash)
      vts_analysis_file = os.path.join(vts_workdir, 'buildgen_analysis.json')
      if invalidation_check.invalid_vts or not os.path.exists(vts_analysis_file):
        classpath = self.context.products.get_data('compile_classpath')
        jar_entries = classpath.get_for_targets(targets)
        all_jars = [jar for _, jar in jar_entries]
        calculated_analysis = {}
        calculated_analysis['hash'] = global_vts.cache_key.hash
        calculated_analysis['jar_to_symbols_exported'] = {}
        for jar_path in sorted(all_jars):
          if os.path.splitext(jar_path)[1] != '.jar':
            continue
          fully_qualified_classes = list(set(self.fully_qualified_classes_from_jar(jar_path)))
          calculated_analysis['jar_to_symbols_exported'][jar_path] = {
            'fully_qualified_classes': fully_qualified_classes,
          }
        calculated_analysis_json = json.dumps(calculated_analysis)
        safe_mkdir(vts_workdir)
        with open(vts_analysis_file, 'wb') as f:
          f.write(calculated_analysis_json)
        if self.artifact_cache_writes_enabled():
          self.update_artifact_cache([(global_vts, [vts_analysis_file])])
      with open(vts_analysis_file, 'rb') as f:
        analysis = json.loads(f.read())

      third_party_jar_symbols = set(chain.from_iterable(
        v['fully_qualified_classes'] for v in analysis['jar_to_symbols_exported'].values()
      ))
      products.safe_create_data('third_party_jar_symbols', lambda: third_party_jar_symbols)
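A downstream task would read the product registered above through the same products API. A sketch of a hypothetical consumer (the queried symbol is a placeholder):

    def execute(self):
        # Read the symbol set cached by the task above.
        third_party_jar_symbols = self.context.products.get_data('third_party_jar_symbols')
        if 'com.example.Foo' in third_party_jar_symbols:
            self.context.log.debug('Symbol is provided by a third-party jar.')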
Example #14
  def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" other pexes produced in previous tasks via PEX_PATH.

    This method always creates a PEX to run locally on the current platform and selected
    interpreter: to create a pex that is distributable to other environments, use the pex_build_util
    Subsystem.

    The returned pex will have the pexes from the ResolveRequirements and GatherSources tasks mixed
    into it via PEX_PATH. Any 3rdparty requirements declared with self.extra_requirements() will
    also be resolved for the global interpreter, and added to the returned pex via PEX_PATH.

    :param pex_info: An optional PexInfo instance to provide to self.merged_pex().
    :type pex_info: :class:`pex.pex_info.PexInfo`, or None
    :rtype: :class:`pex.pex.PEX`
    """
    relevant_targets = self.context.targets(
      lambda tgt: isinstance(tgt, (
        PythonDistribution, PythonRequirementLibrary, PythonTarget, Files)))
    with self.invalidated(relevant_targets) as invalidation_check:

      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        pexes = [
          self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
          self.context.products.get_data(GatherSources.PYTHON_SOURCES)
        ]

        if self.extra_requirements():
          extra_requirements_pex = self.resolve_requirement_strings(
            interpreter, self.extra_requirements())
          # Add the extra requirements first, so they take precedence over any colliding version
          # in the target set's dependency closure.
          pexes = [extra_requirements_pex] + pexes

        # NB: See docstring. We always use the previous selected interpreter.
        constraints = {str(interpreter.identity.requirement)}

        with self.merged_pex(path, pex_info, interpreter, pexes, constraints) as builder:
          for extra_file in self.extra_files():
            extra_file.add_to(builder)
          builder.freeze(bytecode_compile=False)

    return PEX(path, interpreter)
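"Merging via PEX_PATH" refers to the pex runtime's support for layering additional pexes onto the entry pex's import path. A conceptual sketch of the effect (the file names are hypothetical):

import os
import subprocess

# At run time, the code and dependencies of every pex listed on PEX_PATH are
# added to the environment of the pex being executed.
env = dict(os.environ)
env['PEX_PATH'] = ':'.join(['requirements.pex', 'sources.pex'])
subprocess.check_call(['./app.pex'], env=env)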
Example #15
 def mock_ivy_resolve(targets, *args, **kw):
     if targets:
         cache_manager = task.create_cache_manager(False)
         vts = VersionedTargetSet(cache_manager,
                                  cache_manager.wrap_targets(targets))
         cache_key = vts.cache_key.hash
     else:
         cache_key = None
     return [], symlink_map, cache_key
Example #16
  def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
    relevant_targets = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonRequirementLibrary, PythonTarget, Resources)))
    with self.invalidated(relevant_targets) as invalidation_check:

      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
      extra_pex_paths_file_path = path + '.extra_pex_paths'
      extra_pex_paths = None

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        pexes = [
          self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
          self.context.products.get_data(GatherSources.PYTHON_SOURCES)
        ]

        if self.extra_requirements():
          extra_reqs = [PythonRequirement(req_str) for req_str in self.extra_requirements()]
          addr = Address.parse('{}_extra_reqs'.format(self.__class__.__name__))
          self.context.build_graph.inject_synthetic_target(
            addr, PythonRequirementLibrary, requirements=extra_reqs)
          # Add the extra requirements first, so they take precedence over any colliding version
          # in the target set's dependency closure.
          pexes = [self.resolve_requirements([self.context.build_graph.get_target(addr)])] + pexes

        extra_pex_paths = [pex.path() for pex in pexes if pex]

        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
          builder.freeze()

        with open(extra_pex_paths_file_path, 'w') as outfile:
          for epp in extra_pex_paths:
            outfile.write(epp)
            outfile.write(b'\n')

    if extra_pex_paths is None:
      with open(extra_pex_paths_file_path, 'r') as infile:
        extra_pex_paths = [p.strip() for p in infile.readlines()]
    return WrappedPEX(PEX(os.path.realpath(path), interpreter), extra_pex_paths, interpreter)
Example #18
    def create_pex(self, pex_info=None):
        """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
        relevant_targets = self.context.targets(lambda tgt: isinstance(
            tgt, (PythonDistribution, PythonRequirementLibrary, PythonTarget,
                  Files)))
        with self.invalidated(relevant_targets) as invalidation_check:

            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            interpreter = self.context.products.get_data(PythonInterpreter)
            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))

            # Note that we check for the existence of the directory, instead of for invalid_vts,
            # to cover the empty case.
            if not os.path.isdir(path):
                pexes = [
                    self.context.products.get_data(
                        ResolveRequirements.REQUIREMENTS_PEX),
                    self.context.products.get_data(
                        GatherSources.PYTHON_SOURCES)
                ]

                if self.extra_requirements():
                    extra_requirements_pex = self.resolve_requirement_strings(
                        interpreter, self.extra_requirements())
                    # Add the extra requirements first, so they take precedence over any colliding version
                    # in the target set's dependency closure.
                    pexes = [extra_requirements_pex] + pexes
                constraints = {
                    constraint
                    for rt in relevant_targets if is_python_target(rt)
                    for constraint in PythonSetup.global_instance().
                    compatibility_or_constraints(rt)
                }

                with self.merged_pex(path, pex_info, interpreter, pexes,
                                     constraints) as builder:
                    for extra_file in self.extra_files():
                        extra_file.add_to(builder)
                    builder.freeze()

        return PEX(path, interpreter)
Example #19
 def _get_pex_for_versioned_targets(self, interpreter, versioned_targets):
   if versioned_targets:
     target_set_id = VersionedTargetSet.from_versioned_targets(versioned_targets).cache_key.hash
   else:
     raise TaskError("Can't create pex in gather_sources: No python targets provided")
   source_pex_path = os.path.realpath(os.path.join(self.workdir, target_set_id))
   # Note that we check for the existence of the directory, instead of for invalid_vts,
   # to cover the empty case.
   if not os.path.isdir(source_pex_path):
     # Note that we use the same interpreter for all targets: We know the interpreter
     # is compatible (since it's compatible with all targets in play).
     with safe_concurrent_creation(source_pex_path) as safe_path:
       self._build_pex(interpreter, safe_path, [vt.target for vt in versioned_targets])
   return PEX(source_pex_path, interpreter=interpreter)
Example #20
  def execute(self):
    interpreter = None
    python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
    fs = PythonInterpreterFingerprintStrategy(task=self)
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      interpreter_path_file = os.path.join(self.workdir, target_set_id, 'interpreter.path')
      if not os.path.exists(interpreter_path_file):
        interpreter_cache = PythonInterpreterCache(PythonSetup.global_instance(),
                                                   PythonRepos.global_instance(),
                                                   logger=self.context.log.debug)

        # We filter the interpreter cache itself (and not just the interpreters we pull from it)
        # because setting up some python versions (e.g., 3<=python<3.3) crashes, and this gives us
        # an escape hatch.
        filters = self.get_options().constraints or [b'']

        # Cache setup's requirement fetching can hang if run concurrently by another pants proc.
        self.context.acquire_lock()
        try:
          interpreter_cache.setup(filters=filters)
        finally:
          self.context.release_lock()

        interpreter = interpreter_cache.select_interpreter_for_targets(python_tgts)
        safe_mkdir_for(interpreter_path_file)
        with open(interpreter_path_file, 'w') as outfile:
          outfile.write(b'{}\t{}\n'.format(interpreter.binary, str(interpreter.identity)))
          for dist, location in interpreter.extras.items():
            dist_name, dist_version = dist
            outfile.write(b'{}\t{}\t{}\n'.format(dist_name, dist_version, location))

    if not interpreter:
      with open(interpreter_path_file, 'r') as infile:
        lines = infile.readlines()
        binary, identity = lines[0].strip().split('\t')
        extras = {}
        for line in lines[1:]:
          dist_name, dist_version, location = line.strip().split('\t')
          extras[(dist_name, dist_version)] = location

      interpreter = PythonInterpreter(binary, PythonIdentity.from_path(identity), extras)

    self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Example #21
 def _get_pex_for_versioned_targets(self, interpreter, versioned_targets):
   if versioned_targets:
     target_set_id = VersionedTargetSet.from_versioned_targets(versioned_targets).cache_key.hash
   else:
     # If there are no relevant targets, we still go through the motions of gathering
     # an empty set of sources, to prevent downstream tasks from having to check
     # for this special case.
     target_set_id = 'no_targets'
   source_pex_path = os.path.realpath(os.path.join(self.workdir, target_set_id))
   # Note that we check for the existence of the directory, instead of for invalid_vts,
   # to cover the empty case.
   if not os.path.isdir(source_pex_path):
     # Note that we use the same interpreter for all targets: We know the interpreter
     # is compatible (since it's compatible with all targets in play).
     with safe_concurrent_creation(source_pex_path) as safe_path:
       self._build_pex(interpreter, safe_path, [vt.target for vt in versioned_targets])
   return PEX(source_pex_path, interpreter=interpreter)
Example #23
    def execute(self):
        native_lib_tgts = self.context.targets(
            self.native_library_constraint.satisfied_by)
        if native_lib_tgts:
            with self.invalidated(
                    native_lib_tgts,
                    invalidate_dependents=True) as invalidation_check:
                resolve_vts = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts)
                if invalidation_check.invalid_vts or not resolve_vts.valid:
                    for vt in invalidation_check.all_vts:
                        self._fetch_packages(vt)

                native_external_libs_product = self._collect_external_libs(
                    invalidation_check.all_vts)
                self.context.products.register_data(
                    NativeExternalLibraryFiles, native_external_libs_product)
Example #24
    def execute(self):
        task_product = self.context.products.get_data(
            self.NativeExternalLibraryFiles, self.NativeExternalLibraryFiles)

        native_lib_tgts = self.context.targets(
            self.native_library_constraint.satisfied_by)
        if native_lib_tgts:
            with self.invalidated(
                    native_lib_tgts,
                    invalidate_dependents=True) as invalidation_check:
                resolve_vts = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts)
                vts_results_dir = self._prepare_vts_results_dir(resolve_vts)
                if invalidation_check.invalid_vts or not resolve_vts.valid:
                    for vt in invalidation_check.all_vts:
                        self._fetch_packages(vt, vts_results_dir)
                self._populate_task_product(vts_results_dir, task_product)
Example #25
    def resolve_requirements(self, interpreter, req_libs):
        """Requirements resolution for PEX files.

        NB: This method always resolves all requirements in `req_libs` for the 'current' platform! Tasks
        such as PythonBinaryCreate which export code meant for other machines to run will need to
        resolve against the platforms specified by the target or via pants options.

        :param interpreter: Resolve against this :class:`PythonInterpreter`.
        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :returns: a PEX containing target requirements and any specified python dist targets.
        """
        with self.invalidated(req_libs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = "no_targets"

            # NB: Since PythonBinaryCreate is the only task that exports python code for use outside the
            # host system, it's the only python task that needs to resolve for non-'current'
            # platforms. PythonBinaryCreate will actually validate the platforms itself when resolving
            # requirements, instead of using this method, so we can always resolve for 'current' here in
            # order to pull in any binary or universal dists needed for the currently executing host.
            platforms = ["current"]

            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))
            # Note that we check for the existence of the directory, instead of for invalid_vts,
            # to cover the empty case.
            if not os.path.isdir(path):
                with safe_concurrent_creation(path) as safe_path:
                    pex_builder = PexBuilderWrapper.Factory.create(
                        builder=PEXBuilder(path=safe_path,
                                           interpreter=interpreter,
                                           copy=True),
                        log=self.context.log,
                    )
                    pex_builder.add_requirement_libs_from(req_libs,
                                                          platforms=platforms)
                    pex_builder.freeze()
        return PEX(path, interpreter=interpreter)
Example #26
  def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH."""
    relevant_targets = self.context.targets(
      lambda tgt: isinstance(tgt, (
        PythonDistribution, PythonRequirementLibrary, PythonTarget, Files)))
    with self.invalidated(relevant_targets) as invalidation_check:

      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        pexes = [
          self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
          self.context.products.get_data(GatherSources.PYTHON_SOURCES)
        ]

        if self.extra_requirements():
          extra_requirements_pex = self.resolve_requirement_strings(
            interpreter, self.extra_requirements())
          # Add the extra requirements first, so they take precedence over any colliding version
          # in the target set's dependency closure.
          pexes = [extra_requirements_pex] + pexes
        constraints = {constraint for rt in relevant_targets if is_python_target(rt)
                       for constraint in rt.compatibility}

        with self.merged_pex(path, pex_info, interpreter, pexes, constraints) as builder:
          for extra_file in self.extra_files():
            extra_file.add_to(builder)
          builder.freeze()

    return WrappedPEX(PEX(path, interpreter))
Example #27
  def resolve_requirements(self, req_libs):
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          self._build_requirements_pex(interpreter, safe_path, req_libs)
    return PEX(path, interpreter=interpreter)
Example #28
  def execute(self):
    interpreter = None
    python_tgts = self.context.targets(lambda tgt: isinstance(tgt, PythonTarget))
    fs = PythonInterpreterFingerprintStrategy()
    with self.invalidated(python_tgts, fingerprint_strategy=fs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of selecting
      # an interpreter, to prevent downstream tasks from having to check for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'
      interpreter_path_file = self._interpreter_path_file(target_set_id)
      if not os.path.exists(interpreter_path_file):
        self._create_interpreter_path_file(interpreter_path_file, python_tgts)

    if not interpreter:
      interpreter = self._get_interpreter(interpreter_path_file)

    self.context.products.get_data(PythonInterpreter, lambda: interpreter)
Example #29
    def execute(self):
        python_tgts = self.context.targets(
            lambda tgt: isinstance(tgt, PythonTarget))
        fs = PythonInterpreterFingerprintStrategy()
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = self._interpreter_path_file(target_set_id)
            if not os.path.exists(interpreter_path_file):
                self._create_interpreter_path_file(interpreter_path_file,
                                                   python_tgts)

        interpreter = self._get_interpreter(interpreter_path_file)
        self.context.products.register_data(PythonInterpreter, interpreter)
Example #31
  def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.

    NB: This method always resolves all requirements in `req_libs` for the 'current' platform! Tasks
    such as PythonBinaryCreate which export code meant for other machines to run will need to
    resolve against the platforms specified by the target or via pants options.

    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      # NB: Since PythonBinaryCreate is the only task that exports python code for use outside the
      # host system, it's the only python task that needs to resolve for non-'current'
      # platforms. PythonBinaryCreate will actually validate the platforms itself when resolving
      # requirements, instead of using this method, so we can always resolve for 'current' here in
      # order to pull in any binary or universal dists needed for the currently executing host.
      platforms = ['current']

      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
            log=self.context.log)
          pex_builder.add_requirement_libs_from(req_libs, platforms=platforms)
          pex_builder.freeze()
    return PEX(path, interpreter=interpreter)
Example #32
 def execute(self):
     # NB: Downstream product consumers may need the selected interpreter for use with
     # any type of importable Python target, including `PythonRequirementLibrary` targets
     # (for use with the `repl` goal, for instance). For interpreter selection,
     # we only care about targets with compatibility constraints.
     python_tgts_and_reqs = self.context.targets(lambda tgt: isinstance(
         tgt, (PythonTarget, PythonRequirementLibrary)))
     if not python_tgts_and_reqs:
         return
     python_tgts = [
         tgt for tgt in python_tgts_and_reqs
         if isinstance(tgt, PythonTarget)
     ]
     fs = PythonInterpreterFingerprintStrategy()
     with self.invalidated(python_tgts,
                           fingerprint_strategy=fs) as invalidation_check:
         # If there are no relevant targets, we still go through the motions of selecting
         # an interpreter, to prevent downstream tasks from having to check for this special case.
         if invalidation_check.all_vts:
             target_set_id = VersionedTargetSet.from_versioned_targets(
                 invalidation_check.all_vts).cache_key.hash
         else:
             target_set_id = 'no_targets'
         interpreter_path_file = self._interpreter_path_file(target_set_id)
         if not os.path.exists(interpreter_path_file):
             self._create_interpreter_path_file(interpreter_path_file,
                                                python_tgts)
          elif self._detect_and_purge_invalid_interpreter(
                  interpreter_path_file):
              self._create_interpreter_path_file(interpreter_path_file,
                                                 python_tgts)
     interpreter = self._get_interpreter(interpreter_path_file)
     if not interpreter:
         raise TaskError(
             'Pants could not load interpreter from path file: {}'.format(
                 interpreter_path_file))
     self.context.products.register_data(PythonInterpreter, interpreter)
Example #33
    def resolve_requirements(self, req_libs, local_dist_targets=None):
        """Requirements resolution for PEX files.

    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :param local_dist_targets: A list of :class:`PythonDistribution` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
        tgts = req_libs
        if local_dist_targets:
            tgts = req_libs + local_dist_targets
        with self.invalidated(tgts) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            interpreter = self.context.products.get_data(PythonInterpreter)
            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))
            # Note that we check for the existence of the directory, instead of for invalid_vts,
            # to cover the empty case.
            if not os.path.isdir(path):
                with safe_concurrent_creation(path) as safe_path:
                    # Handle locally-built python distribution dependencies.
                    built_dists = self.context.products.get_data(
                        BuildLocalPythonDistributions.PYTHON_DISTS)
                    if built_dists:
                        req_libs = inject_synthetic_dist_requirements(
                            self.context.build_graph, built_dists, ':'.join(
                                2 * [target_set_id])) + req_libs
                    self._build_requirements_pex(interpreter, safe_path,
                                                 req_libs)
        return PEX(path, interpreter=interpreter)
Example #34
    def execute(self):
        req_libs = self.context.targets(lambda tgt: isinstance(tgt, PythonRequirementLibrary))
        fs = PythonRequirementFingerprintStrategy(task=self)
        with self.invalidated(req_libs, fingerprint_strategy=fs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = "no_targets"

            interpreter = self.context.products.get_data(PythonInterpreter)
            path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
            path_tmp = path + ".tmp"

            shutil.rmtree(path_tmp, ignore_errors=True)

            if not os.path.isdir(path):
                self._build_pex(interpreter, path_tmp, req_libs)
                shutil.move(path_tmp, path)

        pex = PEX(os.path.realpath(path), interpreter=interpreter)
        self.context.products.get_data(self.REQUIREMENTS_PEX, lambda: pex)
Example #35
    def resolve(self, workdir, invalidation_check, workunit_factory):
        # Install all of the resolve specs at once, into a single dir which has a name determined by
        # the fingerprint of all the relevant targets in the transitive closure.
        # NB: Obtaining the dir name is currently our only use of the VersionedTargetSet.
        resolve_vts = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts)
        vts_results_dir = os.path.join(workdir, resolve_vts.cache_key.hash)
        safe_mkdir(vts_results_dir)

        all_zef_reqs = []
        for zef_vt in invalidation_check.all_vts:
            all_zef_reqs.extend(zef_vt.target.requirements)
        install_request = self.ZefInstallRequest(zef_requirements=all_zef_reqs,
                                                 into_dir=vts_results_dir)

        if invalidation_check.invalid_vts or not resolve_vts.valid:
            install_result = self.install_requirements(
                install_request, workunit_factory=workunit_factory)
        else:
            # No-op -- we have a (hopefully) well-formed install dir at this location already.
            install_result = self.ZefInstallResult(
                install_request.as_install_spec)

        return install_result
Example #36
    def execute(self):
        # NB: Downstream product consumers may need the selected interpreter for use with
        # any type of importable Python target, including `PythonRequirementLibrary` targets
        # (for use with the `repl` goal, for instance). For interpreter selection,
        # we only care about targets with compatibility constraints.
        python_tgts_and_reqs = self.context.targets(lambda tgt: isinstance(
            tgt, (PythonTarget, PythonRequirementLibrary)))
        if not python_tgts_and_reqs:
            return
        python_tgts = [
            tgt for tgt in python_tgts_and_reqs
            if isinstance(tgt, PythonTarget)
        ]
        fs = PythonInterpreterFingerprintStrategy()
        with self.invalidated(python_tgts,
                              fingerprint_strategy=fs) as invalidation_check:
            if (PythonSetup.global_instance().interpreter_search_paths
                    and PythonInterpreterCache.pex_python_paths()):
                self.context.log.warn(
                    "Detected both PEX_PYTHON_PATH and "
                    "--python-setup-interpreter-search-paths. Ignoring "
                    "--python-setup-interpreter-search-paths.")
            # If there are no relevant targets, we still go through the motions of selecting
            # an interpreter, to prevent downstream tasks from having to check for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'
            interpreter_path_file = self._interpreter_path_file(target_set_id)
            if not os.path.exists(interpreter_path_file):
                self._create_interpreter_path_file(interpreter_path_file,
                                                   python_tgts)

        interpreter = self._get_interpreter(interpreter_path_file)
        self.context.products.register_data(PythonInterpreter, interpreter)
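The `_interpreter_path_file`, `_create_interpreter_path_file`, and `_get_interpreter` helpers are elided from this example; a minimal sketch, assuming the path file stores the selected interpreter's binary path under the task workdir:

    def _interpreter_path_file(self, target_set_id):
        # Assumption: one path file per fingerprinted target set, under this task's workdir.
        return os.path.join(self.workdir, target_set_id, 'interpreter.path')

    def _get_interpreter(self, interpreter_path_file):
        # Assumption: the file's contents are the absolute path of the chosen python binary.
        with open(interpreter_path_file, 'r') as infile:
            binary = infile.read().strip()
        return PythonInterpreter.from_binary(binary)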
Example No. 37
 def _vts_for_partition(invalidation_check):
   return VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
Example No. 38
  def _ivy_resolve(self,
                  targets,
                  executor=None,
                  silent=False,
                  workunit_name=None,
                  confs=None,
                  extra_args=None,
                  invalidate_dependents=False,
                  pinned_artifacts=None):
    """Resolves external dependencies for the given targets.

    If there are no targets suitable for jvm transitive dependency resolution, an empty result is
    returned, i.e. `([], {}, None)`.

    :param targets: The targets to resolve jvm dependencies for.
    :type targets: :class:`collections.Iterable` of :class:`pants.build_graph.target.Target`
    :param executor: A java executor to run ivy with.
    :type executor: :class:`pants.java.executor.Executor`

    :param confs: The ivy configurations to resolve; ('default',) by default.
    :type confs: :class:`collections.Iterable` of string
    :param extra_args: Any extra command line arguments to pass to ivy.
    :type extra_args: list of string
    :param bool invalidate_dependents: `True` to invalidate dependents of targets that needed to be
                                        resolved.
    :returns: A tuple of the classpath, a mapping from ivy cache jars to their linked location
              under .pants.d, and the id of the reports associated with the resolve.
    :rtype: tuple of (list, dict, string)
    """
    if not targets:
      return [], {}, None

    confs = confs or ('default',)
    extra_args = extra_args or []

    fingerprint_strategy = IvyResolveFingerprintStrategy(confs)

    # NB: See class pydoc regarding `use_cache=False`.
    with self.invalidated(targets,
                          invalidate_dependents=invalidate_dependents,
                          silent=silent,
                          fingerprint_strategy=fingerprint_strategy,
                          use_cache=False) as invalidation_check:
      if not invalidation_check.all_vts:
        return [], {}, None
      global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)

      resolve_hash_name = global_vts.cache_key.hash

      ivy_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir, 'ivy')
      target_workdir = os.path.join(ivy_workdir, resolve_hash_name)

      target_classpath_file = os.path.join(target_workdir, 'classpath')
      raw_target_classpath_file = target_classpath_file + '.raw'

      # If a report file is not present, we need to exec ivy, even if all the individual
      # targets are up to date. See https://rbcommons.com/s/twitter/r/2015.
      # Note that it's possible for all targets to be valid but for no classpath file to exist at
      # target_classpath_file, e.g., if we previously built a superset of targets.
      any_report_missing, existing_report_paths = self._collect_existing_reports(confs, resolve_hash_name)
      if (invalidation_check.invalid_vts or
          any_report_missing or
          not os.path.exists(raw_target_classpath_file)):

        ivy = Bootstrapper.default_ivy(bootstrap_workunit_factory=self.context.new_workunit)
        raw_target_classpath_file_tmp = raw_target_classpath_file + '.tmp'
        args = ['-cachepath', raw_target_classpath_file_tmp] + extra_args

        self._exec_ivy(
            target_workdir=target_workdir,
            targets=global_vts.targets,
            args=args,
            executor=executor,
            ivy=ivy,
            workunit_name=workunit_name,
            confs=confs,
            use_soft_excludes=self.get_options().soft_excludes,
            resolve_hash_name=resolve_hash_name,
            pinned_artifacts=pinned_artifacts)

        if not os.path.exists(raw_target_classpath_file_tmp):
          raise self.Error('Ivy failed to create classpath file at {}'
                           .format(raw_target_classpath_file_tmp))
        shutil.move(raw_target_classpath_file_tmp, raw_target_classpath_file)
        logger.debug('Moved ivy classpath file to {dest}'.format(dest=raw_target_classpath_file))
      else:
        logger.debug("Using previously resolved reports: {}".format(existing_report_paths))

    # Make our actual classpath be symlinks, so that the paths are uniform across systems.
    # Note that we must do this even if we read the raw_target_classpath_file from the artifact
    # cache. If we cache the target_classpath_file we won't know how to create the symlinks.
    with IvyTaskMixin.symlink_map_lock:
      # A common dir for symlinks into the ivy2 cache. This ensures that paths to jars
      # in artifact-cached analysis files are consistent across systems.
      # Note that we have one global, well-known symlink dir, again so that paths are
      # consistent across builds.
      symlink_dir = os.path.join(ivy_workdir, 'jars')
      symlink_map = IvyUtils.symlink_cachepath(self.ivy_cache_dir,
                                               raw_target_classpath_file,
                                               symlink_dir,
                                               target_classpath_file)

      classpath = IvyUtils.load_classpath_from_cachepath(target_classpath_file)
      return classpath, symlink_map, resolve_hash_name
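A hedged usage sketch of this mixin method from a hypothetical task's `execute`; the `JvmTarget` filter and the workunit name are illustrative only.

  # Hypothetical caller of the mixin method above.
  def execute(self):
    jvm_targets = self.context.targets(lambda t: isinstance(t, JvmTarget))
    classpath, symlink_map, resolve_hash_name = self._ivy_resolve(
        jvm_targets,
        confs=('default',),
        workunit_name='ivy-resolve')
    if resolve_hash_name is None:
      return  # Nothing needed resolving.
    for jar_path in classpath:
      self.context.log.debug('resolved: {}'.format(jar_path))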
Example No. 39
 def tool_vts(self, invalidation_check):
   # The monolithic shaded tool jar is a single output dependent on the entire target set, and is
   # not divisible by target. So we can only cache it keyed by the entire target set.
   return VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
Example No. 40
  def _ivy_resolve(self,
                  targets,
                  executor=None,
                  silent=False,
                  workunit_name=None,
                  confs=None,
                  extra_args=None,
                  invalidate_dependents=False,
                  pinned_artifacts=None):
    """Resolves external dependencies for the given targets.

    If there are no targets suitable for jvm transitive dependency resolution, an empty result is
    returned.

    :param targets: The targets to resolve jvm dependencies for.
    :type targets: :class:`collections.Iterable` of :class:`pants.build_graph.target.Target`
    :param executor: A java executor to run ivy with.
    :type executor: :class:`pants.java.executor.Executor`

    :param confs: The ivy configurations to resolve; ('default',) by default.
    :type confs: :class:`collections.Iterable` of string
    :param extra_args: Any extra command line arguments to pass to ivy.
    :type extra_args: list of string
    :param bool invalidate_dependents: `True` to invalidate dependents of targets that needed to be
                                        resolved.
    :returns: The result of the resolve.
    :rtype: IvyResolveResult
    """
    # If there are no targets, we don't need to do a resolve.
    if not targets:
      return _NO_RESOLVE_RUN_RESULT

    confs = confs or ('default',)
    extra_args = extra_args or []

    fingerprint_strategy = IvyResolveFingerprintStrategy(confs)

    # NB: See class pydoc regarding `use_cache=False`.
    with self.invalidated(targets,
                          invalidate_dependents=invalidate_dependents,
                          silent=silent,
                          fingerprint_strategy=fingerprint_strategy,
                          use_cache=False) as invalidation_check:
      # In case all the targets were filtered out because they didn't participate in fingerprinting.
      if not invalidation_check.all_vts:
        return _NO_RESOLVE_RUN_RESULT

      resolve_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)

      resolve_hash_name = resolve_vts.cache_key.hash

      ivy_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir, 'ivy')
      resolve_workdir = os.path.join(ivy_workdir, resolve_hash_name)

      symlink_classpath_filename = os.path.join(resolve_workdir, 'classpath')
      ivy_cache_classpath_filename = symlink_classpath_filename + '.raw'

      workdir_reports_by_conf = {c: self._resolve_report_path(resolve_workdir, c) for c in confs}

      def resolve_result_files_exist():
        return (all(os.path.isfile(report) for report in workdir_reports_by_conf.values()) and
                os.path.isfile(ivy_cache_classpath_filename))

      # Check for a previous run's resolution result files. If they exist, try to load a result
      # using them. If that fails, fall back to doing a resolve and loading its results.
      if not invalidation_check.invalid_vts and resolve_result_files_exist():
        result = self._load_from_resolve(ivy_cache_classpath_filename, symlink_classpath_filename,
                                          ivy_workdir, resolve_hash_name, workdir_reports_by_conf)
        if result.all_linked_artifacts_exist():
          return result

      self._do_resolve(confs, executor, extra_args, resolve_vts, pinned_artifacts,
                            ivy_cache_classpath_filename,
                            resolve_hash_name, resolve_workdir, workunit_name)

      return self._load_from_resolve(ivy_cache_classpath_filename,
                                     symlink_classpath_filename,
                                     ivy_workdir,
                                     resolve_hash_name,
                                     workdir_reports_by_conf)
Example No. 41
 def check_artifact_cache_for(self, invalidation_check):
   global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
   return [global_vts]
Example No. 42
  def _ivy_resolve(self,
                   targets,
                   executor=None,
                   silent=False,
                   workunit_name=None,
                   confs=None,
                   extra_args=None,
                   invalidate_dependents=False,
                   pinned_artifacts=None):
    """Resolves external dependencies for the given targets."""
    # If there are no targets, we don't need to do a resolve.
    if not targets:
      return NO_RESOLVE_RUN_RESULT
    confs = confs or ('default',)
    fingerprint_strategy = IvyResolveFingerprintStrategy(confs)
    with self.invalidated(targets,
                          invalidate_dependents=invalidate_dependents,
                          silent=silent,
                          fingerprint_strategy=fingerprint_strategy) as invalidation_check:
      # In case all the targets were filtered out because they didn't participate in fingerprinting.
      if not invalidation_check.all_vts:
        return NO_RESOLVE_RUN_RESULT
      resolve_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
      resolve_hash_name = resolve_vts.cache_key.hash
      global_ivy_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir,
                                        'ivy')
      fetch = self._create_ivy_fetch_step(confs,
                                          resolve_hash_name,
                                          pinned_artifacts,
                                          self.get_options().soft_excludes,
                                          self.ivy_cache_dir,
                                          global_ivy_workdir)

      resolve = self._create_ivy_resolve_step(confs,
                                              resolve_hash_name,
                                              pinned_artifacts,
                                              self.get_options().soft_excludes,
                                              self.ivy_cache_dir,
                                              global_ivy_workdir,
                                              self.global_excludes)
      result = self._perform_resolution(
        fetch, resolve, executor, extra_args, invalidation_check, resolve_vts, resolve_vts.targets, workunit_name,
      )

      # NOTE(mateo): We wire up our own reports; the full ivy report is too heavyweight for our purposes.
      if result.resolved_artifact_paths and self.resolution_report_outdir and not self.get_options().disable_reports:
        # This is added to get a reasonable handle for managed_dependencies target sets.
        # If there is more than one VT it defaults to the VTS.id, which is a non-human-readable cache key.
        # If we wanted to be more performant than rigorous, we could bail after the first query.
        managed_dependencies = set(
          j.target.managed_dependencies
          for j in invalidation_check.all_vts
          if isinstance(j.target, JarLibrary) and
          j.target.managed_dependencies is not None
        )

        if managed_dependencies and len(managed_dependencies) > 1:
          raise TaskError(
            'Each partition should be mapped to a single managed_dependencies target: (was: {})\n Targets: {}'
            .format(managed_dependencies, resolve_vts.targets)
          )
        default_target_name = JarDependencyManagement.global_instance()._default_target.name
        partition_name = list(managed_dependencies)[0].name if managed_dependencies else default_target_name
        self.write_resolve_report(resolve.frozen_resolve_file, partition_name)
      return result
Example No. 43
 def check_artifact_cache_for(self, invalidation_check):
     # Ivy resolution is an output dependent on the entire target set, and is not divisible
     # by target. So we can only cache it keyed by the entire target set.
     global_vts = VersionedTargetSet.from_versioned_targets(
         invalidation_check.all_vts)
     return [global_vts]
Example No. 44
 def check_artifact_cache_for(self, invalidation_check):
   # Ivy resolution is an output dependent on the entire target set, and is not divisible
   # by target. So we can only cache it keyed by the entire target set.
   global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
   return [global_vts]
Example No. 45
    def _ivy_resolve(self,
                     targets,
                     executor=None,
                     silent=False,
                     workunit_name=None,
                     confs=None,
                     extra_args=None,
                     invalidate_dependents=False,
                     pinned_artifacts=None):
        """Resolves external dependencies for the given targets."""
        # If there are no targets, we don't need to do a resolve.
        if not targets:
            return NO_RESOLVE_RUN_RESULT
        confs = confs or ('default', )
        fingerprint_strategy = IvyResolveFingerprintStrategy(confs)
        with self.invalidated(targets,
                              invalidate_dependents=invalidate_dependents,
                              silent=silent,
                              fingerprint_strategy=fingerprint_strategy
                              ) as invalidation_check:
            # In case all the targets were filtered out because they didn't participate in fingerprinting.
            if not invalidation_check.all_vts:
                return NO_RESOLVE_RUN_RESULT
            resolve_vts = VersionedTargetSet.from_versioned_targets(
                invalidation_check.all_vts)
            resolve_hash_name = resolve_vts.cache_key.hash
            global_ivy_workdir = os.path.join(
                self.context.options.for_global_scope().pants_workdir, 'ivy')
            fetch = self._create_ivy_fetch_step(
                confs, resolve_hash_name, pinned_artifacts,
                self.get_options().soft_excludes, self.ivy_cache_dir,
                global_ivy_workdir)

            resolve = self._create_ivy_resolve_step(
                confs, resolve_hash_name, pinned_artifacts,
                self.get_options().soft_excludes, self.ivy_cache_dir,
                global_ivy_workdir, self.global_excludes)
            result = self._perform_resolution(
                fetch,
                resolve,
                executor,
                extra_args,
                invalidation_check,
                resolve_vts,
                resolve_vts.targets,
                workunit_name,
            )

            # NOTE(mateo): We wire up our own reports; the full ivy report is too heavyweight for our purposes.
            if (result.resolved_artifact_paths and self.resolution_report_outdir
                    and not self.get_options().disable_reports):
                # This is added to get a reasonable handle for managed_dependencies target sets.
                # If there is more than one VT it defaults to the VTS.id, which is a non-human-readable cache key.
                # If we wanted to be more performant than rigorous, we could bail after the first query.
                managed_dependencies = set(
                    j.target.managed_dependencies
                    for j in invalidation_check.all_vts
                    if isinstance(j.target, JarLibrary)
                    and j.target.managed_dependencies is not None)

                if managed_dependencies and len(managed_dependencies) > 1:
                    raise TaskError(
                        'Each partition should be mapped to a single managed_dependencies target: (was: {})\n Targets: {}'
                        .format(managed_dependencies, resolve_vts.targets))
                default_target_name = JarDependencyManagement.global_instance()._default_target.name
                partition_name = (list(managed_dependencies)[0].name
                                  if managed_dependencies else default_target_name)
                self.write_resolve_report(resolve.frozen_resolve_file,
                                          partition_name)
            return result
Example No. 46
 def check_artifact_cache_for(self, invalidation_check):
   """Tells the artifact cache mechanism that we have a single artifact per global classpath."""
   global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
   return [global_vts]
Example No. 47
  def execute(self):

    # Pants no longer allows options to be tuples or sets, so we use lists of dicts and convert
    # them into hashable structures here.

    # Pins converted to { (org, name): rev, ... }
    global_pinned_tuples = {}
    for pin in self.get_options().global_pinned_versions:
      artifact_tuple = (pin['org'], pin['name'])
      if artifact_tuple in global_pinned_tuples:
        raise Exception('An artifact has conflicting overrides!:\n{}:{} and\n'
          '{}'.format(artifact_tuple, pin['rev'], global_pinned_tuples[artifact_tuple]))
      global_pinned_tuples[artifact_tuple] = pin['rev']

    # Overrides converted to { (org, name, rev): /path/to/artifact, ... }
    override_tuples = {}
    for override in self.get_options().local_override_versions:
      override_tuples[(override['org'], override['name'], override['rev'])] = override['artifact_path']

    # Exclusions converted to [(org, name), ...]
    global_exclusion_tuples = []
    for exclusion in self.get_options().global_exclusions:
      global_exclusion_tuples.append((exclusion['org'], exclusion['name']))

    global_exclusions = frozenset(global_exclusion_tuples)
    global_pinned_versions = dict(global_pinned_tuples)
    local_override_versions = override_tuples
    fetchers = ChainedFetcher(self.get_options().maven_repos)

    invalidation_context_manager = self.invalidated(
      self.all_jar_libs,
      invalidate_dependents=False,
      fingerprint_strategy=PomResolveFingerprintStrategy(global_exclusions, global_pinned_versions),
    )

    with invalidation_context_manager as invalidation_check:
      # NOTE: In terms of caching this models IvyResolve in pants quite closely. We always
      # operate over and cache in terms of the global set of jar dependencies. Note that we override
      # `check_artifact_cache_for` in order to get the artifact cache to respect this.
      global_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)
      vts_workdir = os.path.join(self.workdir, global_vts.cache_key.hash)
      analysis_path = os.path.join(vts_workdir, 'analysis.pickle')
      if invalidation_check.invalid_vts or not os.path.exists(analysis_path):
        with self.context.new_workunit('traverse-pom-graph'):
          global_dep_graph, target_to_dep_graph = self.resolve_dependency_graphs(
            self.all_jar_libs,
            fetchers,
            global_exclusions,
            global_pinned_versions,
          )
          self.report_unused_pins_and_exclusions(
            global_dep_graph,
            global_pinned_versions,
            global_exclusions,
          )
        # TODO: Not super happy about using target.id really anywhere, since it's just a name.
        # But for now this is all completely invalidated whenever any part of 3rdparty:: changes.
        # It might however be possible that just renaming a JarLib (and doing nothing else) will
        # break this.
        for target, dep_graph in target_to_dep_graph.items():
          self.target_to_maven_coordinate_closure[target.id] = list(dep_graph.artifact_closure())
        copied_coord_to_artifacts = deepcopy(global_dep_graph._coord_to_provided_artifacts)
        self.maven_coordinate_to_provided_artifacts.update(copied_coord_to_artifacts)
        safe_mkdir(vts_workdir)
        # NOTE: These products are only used by pom-ivy-diff, which is only there for debugging.
        # It will probably go away within a few months, at which point these products optionally
        # can too.  But they might also be useful to future downstream tasks.
        analysis = {
          'target_to_maven_coordinate_closure': self.target_to_maven_coordinate_closure,
          'maven_coordinate_to_provided_artifacts': self.maven_coordinate_to_provided_artifacts,
          'global_dep_graph': global_dep_graph,
        }
        with open(analysis_path, 'wb') as f:
          pickle.dump(analysis, f)
        if self.artifact_cache_writes_enabled():
          self.update_artifact_cache([(global_vts, [analysis_path])])
      else:
        with open(analysis_path, 'rb') as f:
          analysis = pickle.load(f)
        self.target_to_maven_coordinate_closure.update(
          analysis['target_to_maven_coordinate_closure'],
        )
        self.maven_coordinate_to_provided_artifacts.update(
          analysis['maven_coordinate_to_provided_artifacts'],
        )
        global_dep_graph = analysis['global_dep_graph']

    self.report_for_artifacts(global_dep_graph)
    conflicted_deps = global_dep_graph.conflicted_dependencies()
    if conflicted_deps:
      self.report_conflicted_deps(
        conflicted_deps,
        global_dep_graph.reverse_unversioned_dep_graph(),
        global_dep_graph,
      )
      raise Exception(
        'PomResolve found {} conflicting dependencies.  These must be explicitly'
        ' pinned or excluded in order to generate a consistent global classpath.'
        ' See the output above for details, and try `./pants pom-resolve --help`'
        ' for information on flags to get more detailed reporting.'
        .format(len(conflicted_deps)))

    all_artifacts = set()
    for coord_closure in self.target_to_maven_coordinate_closure.values():
      for coord in coord_closure:
        for artifact in self.maven_coordinate_to_provided_artifacts[coord]:
          all_artifacts.add(artifact)

    classpath_dump_file = self.get_options().dump_classpath_file
    if classpath_dump_file:
      with open(classpath_dump_file, 'w') as f:
        f.write('FINGERPRINT: {}\n'.format(global_vts.cache_key.hash))
        for artifact in sorted(all_artifacts):
          f.write('{}\n'.format(artifact))
      logger.info('Dumped classpath file to {}'.format(classpath_dump_file))

    with self.context.new_workunit('fetch-artifacts'):
      coord_to_artifact_symlinks = self._fetch_artifacts(local_override_versions)

    if self.get_options().fetch_source_jars:
      with self.context.new_workunit('fetch-source-jars'):
        symlink_dir = os.path.join(
          self.pom_cache_dir,
          'source-jars-symlink-farms',
          global_vts.cache_key.hash,
        )
        if not os.path.exists(symlink_dir):
          self._fetch_source_jars(fetchers, symlink_dir)
        stderr('\nFetched source jars to {}'.format(symlink_dir))

    classpath_info_filename = self.get_options().write_classpath_info_to_file
    if classpath_info_filename:
      classpath_info = {
        'fingerprint': global_vts.cache_key.hash,
        'classpath': [
           {
             'path': os.path.join(self.pom_cache_dir, artifact.artifact_path),
             'groupId': artifact.groupId,
             'artifactId': artifact.artifactId,
             'version': artifact.version,
             'packaging': artifact.packaging,
             'classifier': artifact.classifier,
           }
           for artifact in all_artifacts
        ],
      }
      with open(classpath_info_filename, 'w') as classpath_info_file:
        classpath_info_file.write(json.dumps(classpath_info))
      logger.info('Wrote classpath info JSON to {}.'.format(classpath_info_filename))

    with self.context.new_workunit('populate-compile-classpath'):
      self._populate_compile_classpath()
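For reference, a sketch of the option value shapes the conversions at the top of `execute` expect; the coordinates and artifact path are purely illustrative.

  # Illustrative option values matching the dict shapes consumed in execute().
  global_pinned_versions = [
    {'org': 'com.google.guava', 'name': 'guava', 'rev': '18.0'},
  ]
  local_override_versions = [
    {'org': 'com.example', 'name': 'widget', 'rev': '1.2.3',
     'artifact_path': '/tmp/widget-1.2.3.jar'},
  ]
  global_exclusions = [
    {'org': 'log4j', 'name': 'log4j'},
  ]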
Example No. 48
 def _vts_for_partition(invalidation_check):
     return VersionedTargetSet.from_versioned_targets(
         invalidation_check.all_vts)
Example No. 49
 def tool_vts(self, invalidation_check):
     # The monolithic shaded tool jar is a single output dependent on the entire target set, and is
     # not divisible by target. So we can only cache it keyed by the entire target set.
     return VersionedTargetSet.from_versioned_targets(
         invalidation_check.all_vts)
Example No. 50
    def resolve(self, targets, compile_classpath, sources, javadoc):
        """
    This is the core function for coursier resolve.

    Validation strategy:

    1. All targets are going through the `invalidated` to get fingerprinted in the target level.
       No cache is fetched at this stage because it is disabled.
    2. Once each target is fingerprinted, we combine them into a `VersionedTargetSet` where they
       are fingerprinted together, because each run of 3rdparty resolve is context sensitive.

    Artifacts are stored in `VersionedTargetSet`'s results_dir, the contents are the aggregation of
    each coursier run happened within that context.

    Caching: (TODO): https://github.com/pantsbuild/pants/issues/5187
    Currently it is disabled due to absolute paths in the coursier results.

    :param targets: a collection of targets to do 3rdparty resolve against
    :param compile_classpath: classpath product that holds the resolution result. IMPORTANT: this parameter will be changed.
    :param sources: if True, fetch sources for 3rdparty
    :param javadoc: if True, fetch javadoc for 3rdparty
    :return: n/a
    """
        manager = JarDependencyManagement.global_instance()

        jar_targets = manager.targets_by_artifact_set(targets)

        for artifact_set, target_subset in jar_targets.items():
            # TODO(wisechengyi): this is the only place we are using an IvyUtils method, and it isn't really specific to ivy.
            raw_jar_deps, global_excludes = IvyUtils.calculate_classpath(
                target_subset)

            # ['sources'] * False = [], ['sources'] * True = ['sources']
            confs_for_fingerprint = ['sources'] * sources + ['javadoc'] * javadoc
            fp_strategy = CoursierResolveFingerprintStrategy(
                confs_for_fingerprint)

            compile_classpath.add_excludes_for_targets(target_subset)

            with self.invalidated(
                    target_subset,
                    invalidate_dependents=False,
                    silent=False,
                    fingerprint_strategy=fp_strategy) as invalidation_check:

                if not invalidation_check.all_vts:
                    continue

                pants_workdir = self.get_options().pants_workdir
                resolve_vts = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts)

                vt_set_results_dir = self._prepare_vts_results_dir(
                    pants_workdir, resolve_vts)
                coursier_cache_dir, pants_jar_base_dir = self._prepare_workdir(
                    pants_workdir)

                # Check each individual target without context first
                if not invalidation_check.invalid_vts:

                    # If the individuals are valid, check them as a VersionedTargetSet
                    if resolve_vts.valid:
                        # Load up from the results dir
                        success = self._load_from_results_dir(
                            compile_classpath, vt_set_results_dir,
                            coursier_cache_dir, invalidation_check,
                            pants_jar_base_dir)
                        if success:
                            return

                jars_to_resolve, pinned_coords = self._compute_jars_to_resolve_and_pin(
                    raw_jar_deps, artifact_set, manager)

                results = self._get_result_from_coursier(
                    jars_to_resolve, global_excludes, pinned_coords,
                    pants_workdir, coursier_cache_dir, sources, javadoc)

                for conf, result_list in results.items():
                    for result in result_list:
                        self._load_json_result(
                            conf, compile_classpath, coursier_cache_dir,
                            invalidation_check, pants_jar_base_dir, result,
                            self._override_classifiers_for_conf(conf))

                self._populate_results_dir(vt_set_results_dir, results)
                resolve_vts.update()
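The `confs_for_fingerprint` expression above leans on `bool` being an `int` subclass; a quick demonstration of the trick noted in the inline comment:

    # bool subclasses int, so multiplying a list by a bool keeps or drops its contents.
    assert ['sources'] * True == ['sources']
    assert ['sources'] * False == []
    assert ['sources'] * True + ['javadoc'] * False == ['sources']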
Example No. 51
  def _ivy_resolve(self,
                   targets,
                   executor=None,
                   silent=False,
                   workunit_name=None,
                   confs=None,
                   extra_args=None,
                   invalidate_dependents=False,
                   pinned_artifacts=None):
    """Resolves external dependencies for the given targets.

    If there are no targets suitable for jvm transitive dependency resolution, an empty result is
    returned.

    :param targets: The targets to resolve jvm dependencies for.
    :type targets: :class:`collections.Iterable` of :class:`pants.build_graph.target.Target`
    :param executor: A java executor to run ivy with.
    :type executor: :class:`pants.java.executor.Executor`

    :param confs: The ivy configurations to resolve; ('default',) by default.
    :type confs: :class:`collections.Iterable` of string
    :param extra_args: Any extra command line arguments to pass to ivy.
    :type extra_args: list of string
    :param bool invalidate_dependents: `True` to invalidate dependents of targets that needed to be
                                        resolved.
    :returns: The result of the resolve.
    :rtype: IvyResolveResult
    """
    # If there are no targets, we don't need to do a resolve.
    if not targets:
      return NO_RESOLVE_RUN_RESULT

    confs = confs or ('default',)

    fingerprint_strategy = IvyResolveFingerprintStrategy(confs)

    with self.invalidated(targets,
                          invalidate_dependents=invalidate_dependents,
                          silent=silent,
                          fingerprint_strategy=fingerprint_strategy) as invalidation_check:
      # In case all the targets were filtered out because they didn't participate in fingerprinting.
      if not invalidation_check.all_vts:
        return NO_RESOLVE_RUN_RESULT

      resolve_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)

      resolve_hash_name = resolve_vts.cache_key.hash
      # NB: This used to be a global directory, but is now specific to each task that includes
      # this mixin.
      ivy_workdir = os.path.join(self.versioned_workdir, 'ivy')
      targets = resolve_vts.targets

      fetch = IvyFetchStep(confs,
                           resolve_hash_name,
                           pinned_artifacts,
                           self.get_options().soft_excludes,
                           self.ivy_resolution_cache_dir,
                           self.ivy_repository_cache_dir,
                           ivy_workdir)
      resolve = IvyResolveStep(confs,
                               resolve_hash_name,
                               pinned_artifacts,
                               self.get_options().soft_excludes,
                               self.ivy_resolution_cache_dir,
                               self.ivy_repository_cache_dir,
                               ivy_workdir)

      return self._perform_resolution(fetch, resolve, executor, extra_args, invalidation_check,
                                      resolve_vts, targets, workunit_name)
Example No. 52
  def resolve(self, targets, compile_classpath, sources, javadoc, executor):
    """
    This is the core function for coursier resolve.

    Validation strategy:

    1. All targets are going through the `invalidated` to get fingerprinted in the target level.
       No cache is fetched at this stage because it is disabled.
    2. Once each target is fingerprinted, we combine them into a `VersionedTargetSet` where they
       are fingerprinted together, because each run of 3rdparty resolve is context sensitive.

    Artifacts are stored in `VersionedTargetSet`'s results_dir, the contents are the aggregation of
    each coursier run happened within that context.

    Caching: (TODO): https://github.com/pantsbuild/pants/issues/5187
    Currently it is disabled due to absolute paths in the coursier results.

    :param targets: a collection of targets to do 3rdparty resolve against
    :param compile_classpath: classpath product that holds the resolution result. IMPORTANT: this parameter will be changed.
    :param sources: if True, fetch sources for 3rdparty
    :param javadoc: if True, fetch javadoc for 3rdparty
    :param executor: An instance of `pants.java.executor.Executor`. If None, a subprocess executor will be assigned.
    :return: n/a
    """
    manager = JarDependencyManagement.global_instance()

    jar_targets = manager.targets_by_artifact_set(targets)

    executor = executor or SubprocessExecutor(DistributionLocator.cached())
    if not isinstance(executor, Executor):
      raise ValueError('The executor argument must be an Executor instance, given {} of type {}'.format(
        executor, type(executor)))

    for artifact_set, target_subset in jar_targets.items():
      # TODO(wisechengyi): this is the only place we are using an IvyUtils method, and it isn't really specific to ivy.
      raw_jar_deps, global_excludes = IvyUtils.calculate_classpath(target_subset)

      # ['sources'] * False = [], ['sources'] * True = ['sources']
      confs_for_fingerprint = ['sources'] * sources + ['javadoc'] * javadoc
      fp_strategy = CoursierResolveFingerprintStrategy(confs_for_fingerprint)

      compile_classpath.add_excludes_for_targets(target_subset)

      with self.invalidated(target_subset,
                            invalidate_dependents=False,
                            silent=False,
                            fingerprint_strategy=fp_strategy) as invalidation_check:

        if not invalidation_check.all_vts:
          continue

        resolve_vts = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts)

        vt_set_results_dir = self._prepare_vts_results_dir(resolve_vts)
        pants_jar_base_dir = self._prepare_workdir()
        coursier_cache_dir = CoursierSubsystem.global_instance().get_options().cache_dir

        # If a report is requested, do not proceed with loading the validated result.
        if not self.get_options().report:
          # Check each individual target without context first
          # If the individuals are valid, check them as a VersionedTargetSet
          if not invalidation_check.invalid_vts and resolve_vts.valid:
            # Load up from the results dir
            success = self._load_from_results_dir(compile_classpath, vt_set_results_dir,
                                                  coursier_cache_dir, invalidation_check, pants_jar_base_dir)
            if success:
              return

        jars_to_resolve, pinned_coords = self._compute_jars_to_resolve_and_pin(raw_jar_deps,
                                                                               artifact_set,
                                                                               manager)

        results = self._get_result_from_coursier(jars_to_resolve, global_excludes, pinned_coords,
                                                 coursier_cache_dir, sources, javadoc, executor)

        for conf, result_list in results.items():
          for result in result_list:
            self._load_json_result(conf, compile_classpath, coursier_cache_dir, invalidation_check,
                                   pants_jar_base_dir, result, self._override_classifiers_for_conf(conf))

        self._populate_results_dir(vt_set_results_dir, results)
        resolve_vts.update()
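A hedged call-site sketch for this executor-aware variant; the `compile_classpath` product key and the all-targets selection are assumptions for illustration.

  # Hypothetical call site: resolve jars plus sources, letting the method fall back
  # to a SubprocessExecutor built from the cached JVM distribution.
  compile_classpath = self.context.products.get_data('compile_classpath')
  self.resolve(targets=self.context.targets(),
               compile_classpath=compile_classpath,
               sources=True,
               javadoc=False,
               executor=None)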