def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.

    Builds (or reuses) a requirements PEX under a workdir path keyed on the
    interpreter identity plus the hash of the target set, so repeated runs
    with the same inputs hit the cached chroot.

    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      # We need to ensure that we are resolving for only the current platform if we are
      # including local python dist targets that have native extensions.
      tgts = self.context.targets()
      maybe_platforms = ['current'] if build_for_current_platform_only_check(tgts) else None

      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        # Build into a scratch path that is only renamed to `path` on success,
        # so concurrent runs never observe a half-built chroot.
        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(path=safe_path, interpreter=interpreter, copy=True)
          dump_requirement_libs(builder, interpreter, req_libs, self.context.log, platforms=maybe_platforms)
          builder.freeze()
    return PEX(path, interpreter=interpreter)
Exemplo n.º 2
0
  def nsutil_pex(self):
    """Build (and cache) a PEX whose main.py reports namespace-package sources.

    The generated main.py reads file paths from argv and prints each path whose
    source declares a namespace package, reusing the source code of
    `declares_namespace_package` obtained via `inspect.getsource`.

    :returns: A PEX handle on the cached chroot.
    """
    interpreter = self.context.products.get_data(PythonInterpreter)
    # Cache per interpreter version so interpreter switches rebuild the tool.
    chroot = os.path.join(self.workdir, 'nsutil', interpreter.version_string)
    if not os.path.exists(chroot):
      pex_info = PexInfo.default(interpreter=interpreter)
      with safe_concurrent_creation(chroot) as scratch:
        builder = PEXBuilder(path=scratch, interpreter=interpreter, pex_info=pex_info, copy=True)
        with temporary_file(binary_mode=False) as fp:
          declares_namespace_package_code = inspect.getsource(declares_namespace_package)
          fp.write(textwrap.dedent("""
            import sys


            {declares_namespace_package_code}


            if __name__ == '__main__':
              for path in sys.argv[1:]:
                if declares_namespace_package(path):
                  print(path)
          """).strip().format(declares_namespace_package_code=declares_namespace_package_code))
          # Close first so the full source is flushed to disk before
          # set_executable reads the file back in by name.
          fp.close()
          builder.set_executable(filename=fp.name, env_filename='main.py')
          builder.freeze()
    return PEX(pex=chroot, interpreter=interpreter)
Exemplo n.º 3
0
  def bootstrap_coursier(self, workunit_factory):
    """Download (at most once per version) and return the coursier bootstrap jar path.

    :param workunit_factory: Context-manager factory used to report the
      bootstrap work and record its outcome.
    :returns: Path to the cached coursier.jar.
    :raises self.Error: If fetching the bootstrap jar fails.
    """

    opts = self.get_options()
    bootstrap_url = opts.bootstrap_jar_url

    # The cache location is keyed on the coursier version, so upgrades re-fetch.
    coursier_bootstrap_dir = os.path.join(opts.pants_bootstrapdir,
                                          'tools', 'jvm', 'coursier',
                                          opts.version)

    bootstrap_jar_path = os.path.join(coursier_bootstrap_dir, 'coursier.jar')

    with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit:

      if not os.path.exists(bootstrap_jar_path):
        with safe_concurrent_creation(bootstrap_jar_path) as temp_path:
          fetcher = Fetcher(get_buildroot())
          # The checksum listener observes the download so its sha1 can be logged below.
          checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
          try:
            logger.info('\nDownloading {}'.format(bootstrap_url))
            # TODO: Capture the stdout of the fetcher, instead of letting it output
            # to the console directly.
            fetcher.download(bootstrap_url,
                             listener=fetcher.ProgressListener().wrap(checksummer),
                             path_or_fd=temp_path,
                             timeout_secs=opts.bootstrap_fetch_timeout_secs)
            logger.info('sha1: {}'.format(checksummer.checksum))
          except fetcher.Error as e:
            workunit.set_outcome(WorkUnit.FAILURE)
            raise self.Error('Problem fetching the coursier bootstrap jar! {}'.format(e))
          else:
            workunit.set_outcome(WorkUnit.SUCCESS)

      return bootstrap_jar_path
Exemplo n.º 4
0
 def test_safe_concurrent_creation(self):
   # While the context is open, work happens at a scratch path; the final
   # path only materializes once the context exits cleanly.
   with temporary_dir() as tmpdir:
     final_path = os.path.join(tmpdir, 'expected_file')
     with safe_concurrent_creation(final_path) as scratch_path:
       os.mkdir(scratch_path)
       self.assertFalse(os.path.exists(final_path))
       self.assertTrue(os.path.exists(scratch_path))
     self.assertTrue(os.path.exists(final_path))
Exemplo n.º 5
0
 def _get_matching_wheel(self, pex_path, interpreter, requirements, module_name):
   """Resolve exactly one wheel for `module_name` out of `requirements` using pex."""
   with self.context.new_workunit('extract-native-wheels'):
     with safe_concurrent_creation(pex_path) as sandbox:
       underlying_builder = PEXBuilder(path=sandbox, interpreter=interpreter)
       wrapper = PexBuilderWrapper.Factory.create(builder=underlying_builder, log=self.context.log)
       return wrapper.extract_single_dist_for_current_platform(requirements, module_name)
Exemplo n.º 6
0
 def _build_tool_pex(self, tool_subsystem, interpreter, pex_path):
   """Build a PEX for `tool_subsystem` at `pex_path`, resolved for the current platform only."""
   with safe_concurrent_creation(pex_path) as staging_dir:
     wrapper = PexBuilderWrapper.Factory.create(
       builder=PEXBuilder(path=staging_dir, interpreter=interpreter),
       log=self.context.log)
     tool_requirements = [
       PythonRequirement(spec) for spec in tool_subsystem.get_requirement_specs()
     ]
     wrapper.add_resolved_requirements(reqs=tool_requirements, platforms=['current'])
     wrapper.set_entry_point(tool_subsystem.get_entry_point())
     wrapper.freeze()
Exemplo n.º 7
0
 def _get_matching_wheel(self, pex_path, interpreter, requirements,
                         module_name):
     """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex."""
     # The resolve runs inside a scratch chroot that is only renamed to
     # `pex_path` once creation completes, keeping concurrent runs safe.
     with self.context.new_workunit('extract-native-wheels'):
         with safe_concurrent_creation(pex_path) as chroot:
             pex_builder = PexBuilderWrapper.Factory.create(
                 builder=PEXBuilder(path=chroot, interpreter=interpreter),
                 log=self.context.log)
             return pex_builder.extract_single_dist_for_current_platform(
                 requirements, module_name)
Exemplo n.º 8
0
 def _resolve_requirements_for_versioned_target_closure(self, interpreter, vt):
   """Build (or reuse) a requirements PEX for `vt`'s transitive python requirements.

   :param interpreter: The PythonInterpreter to resolve against.
   :param vt: A versioned target whose closure is scanned for requirement libraries.
   :returns: A PEX containing the resolved requirements.
   """
   reqs_pex_path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity),
                                                 vt.cache_key.hash))
   if not os.path.isdir(reqs_pex_path):
     # Materialize eagerly: on Python 3 `filter` returns a one-shot lazy
     # iterator; a concrete list is safe to reuse and matches the sibling
     # implementations of this method elsewhere in this file.
     req_libs = [t for t in vt.target.closure() if has_python_requirements(t)]
     with safe_concurrent_creation(reqs_pex_path) as safe_path:
       builder = PEXBuilder(safe_path, interpreter=interpreter, copy=True)
       dump_requirement_libs(builder, interpreter, req_libs, self.context.log)
       builder.freeze()
   return PEX(reqs_pex_path, interpreter=interpreter)
Exemplo n.º 9
0
    def test_safe_concurrent_creation_noop(self):
        # Exiting the context without writing anything must not blow up, must
        # not produce the final file, and should still create its parent dir.
        with temporary_dir() as td:
            target = os.path.join(td, 'parent_dir', 'expected_file')

            with safe_concurrent_creation(target):
                pass

            self.assertTrue(os.path.exists(os.path.dirname(target)))
            self.assertFalse(os.path.exists(target))
Exemplo n.º 10
0
 def build_isort_pex(cls, context, interpreter, pex_path,
                     requirements_lib):
     """Construct the isort tool PEX at `pex_path` from `requirements_lib`."""
     with safe_concurrent_creation(pex_path) as staging:
         isort_builder = PEXBuilder(path=staging, interpreter=interpreter)
         dump_requirement_libs(
             builder=isort_builder,
             interpreter=interpreter,
             req_libs=[requirements_lib],
             log=context.log)
         isort_builder.set_script('isort')
         isort_builder.freeze()
Exemplo n.º 11
0
 def _resolve_requirements_for_versioned_target_closure(self, interpreter, vt):
   """Build (or reuse) a requirements PEX for `vt`'s transitive python requirements.

   :param interpreter: The PythonInterpreter to resolve against.
   :param vt: A versioned target whose closure is scanned for requirement libraries.
   :returns: A PEX containing the resolved requirements.
   """
   reqs_pex_path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity),
                                                 vt.cache_key.hash))
   if not os.path.isdir(reqs_pex_path):
     # Materialize eagerly: on Python 3 `filter` returns a one-shot lazy
     # iterator; a concrete list is safe to reuse and matches the sibling
     # implementations of this method elsewhere in this file.
     req_libs = [t for t in vt.target.closure() if has_python_requirements(t)]
     with safe_concurrent_creation(reqs_pex_path) as safe_path:
       builder = PEXBuilder(safe_path, interpreter=interpreter, copy=True)
       dump_requirement_libs(builder, interpreter, req_libs, self.context.log)
       builder.freeze()
   return PEX(reqs_pex_path, interpreter=interpreter)
Exemplo n.º 12
0
  def test_safe_concurrent_creation_noop(self):
    # A body that writes nothing should leave no final file behind, though
    # the parent directory is created as a side effect.
    with temporary_dir() as workdir:
      final = os.path.join(workdir, 'parent_dir', 'expected_file')

      with safe_concurrent_creation(final):
        pass

      self.assertTrue(os.path.exists(os.path.dirname(final)))
      self.assertFalse(os.path.exists(final))
Exemplo n.º 13
0
    def test_safe_concurrent_creation_noop(self) -> None:
        # Nothing is written inside the context: the final file must not
        # appear, but its parent directory is still created.
        with temporary_dir() as td:
            destination = os.path.join(td, "parent_dir", "expected_file")

            with safe_concurrent_creation(destination):
                pass

            assert os.path.exists(os.path.dirname(destination))
            assert not os.path.exists(destination)
Exemplo n.º 14
0
 def _generate_requirements_pex(self, pex_path, interpreter, requirements):
   """Build (or reuse) a PEX of `requirements` at `pex_path` and return it."""
   # Guard clause: an existing chroot is reused as-is.
   if os.path.exists(pex_path):
     return PEX(pex_path, interpreter=interpreter)
   with self.context.new_workunit('extract-native-wheels'):
     with safe_concurrent_creation(pex_path) as scratch:
       wrapper = PexBuilderWrapper.Factory.create(
         builder=PEXBuilder(path=scratch, interpreter=interpreter),
         log=self.context.log)
       wrapper.add_resolved_requirements(requirements)
       wrapper.freeze()
   return PEX(pex_path, interpreter=interpreter)
Exemplo n.º 15
0
    def checker_pex(self, interpreter):
        """Build (or reuse) the style-checker PEX for `interpreter` and return it.

        In PANTS_DEV mode the checker is built from local sources; otherwise it
        is satisfied from the running environment or resolved from a repo.
        """
        # TODO(John Sirois): Formalize in pants.base?
        pants_dev_mode = os.environ.get('PANTS_DEV')

        if pants_dev_mode:
            checker_id = self.checker_target.transitive_invalidation_hash()
        else:
            checker_id = hash_all([self._CHECKER_REQ])

        # The cache key covers both the checker contents and the interpreter.
        pex_path = os.path.join(self.workdir, 'checker', checker_id,
                                str(interpreter.identity))

        if not os.path.exists(pex_path):
            with self.context.new_workunit(name='build-checker'):
                with safe_concurrent_creation(pex_path) as chroot:
                    pex_builder = PexBuilderWrapper(
                        PEXBuilder(path=chroot, interpreter=interpreter),
                        PythonRepos.global_instance(),
                        PythonSetup.global_instance(), self.context.log)

                    # Constraining is required to guard against the case where the user
                    # has a pexrc file set.
                    pex_builder.add_interpreter_constraint(
                        str(interpreter.identity.requirement))

                    if pants_dev_mode:
                        # Dev mode: build the checker from its local sources plus
                        # the requirement libraries in its closure.
                        pex_builder.add_sources_from(self.checker_target)
                        req_libs = [
                            tgt for tgt in self.checker_target.closure()
                            if isinstance(tgt, PythonRequirementLibrary)
                        ]

                        pex_builder.add_requirement_libs_from(
                            req_libs=req_libs)
                    else:
                        try:
                            # The checker is already on sys.path, eg: embedded in pants.pex.
                            working_set = WorkingSet(entries=sys.path)
                            for dist in working_set.resolve(
                                [Requirement.parse(self._CHECKER_REQ)]):
                                pex_builder.add_direct_requirements(
                                    dist.requires())
                                pex_builder.add_distribution(dist)
                            pex_builder.add_direct_requirements(
                                [self._CHECKER_REQ])
                        except DistributionNotFound:
                            # We need to resolve the checker from a local or remote distribution repo.
                            pex_builder.add_resolved_requirements(
                                [PythonRequirement(self._CHECKER_REQ)])

                    pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
                    pex_builder.freeze()

        return PEX(pex_path, interpreter=interpreter)
Exemplo n.º 16
0
 def build_isort_pex(cls, context, interpreter, pex_path,
                     requirements_lib):
     """Build the isort tool PEX at `pex_path` from `requirements_lib`.

     NOTE(review): the `cls` first parameter suggests this is a classmethod —
     the decorator is not visible here; confirm at the definition site.
     """
     with safe_concurrent_creation(pex_path) as chroot:
         pex_builder = PexBuilderWrapper(
             PEXBuilder(path=chroot, interpreter=interpreter),
             PythonRepos.global_instance(),
             PythonSetup.global_instance(), context.log)
         pex_builder.add_requirement_libs_from(
             req_libs=[requirements_lib])
         pex_builder.set_script('isort')
         pex_builder.freeze()
Exemplo n.º 17
0
 def dump_to_file(cls, filename, resolutions_by_conf):
   """Atomically serialize `resolutions_by_conf` to JSON at `filename`.

   :param filename: Destination path for the JSON document.
   :param resolutions_by_conf: Mapping of conf name to a resolution object
     exposing `target_spec_to_coordinate_strings()` and
     `coordinate_to_attributes`.
   """
   res = {}
   for conf, resolution in resolutions_by_conf.items():
     # OrderedDicts preserve key order in the emitted JSON.
     res[conf] = OrderedDict([
       ['target_to_coords', resolution.target_spec_to_coordinate_strings()],
       ['coord_to_attrs', OrderedDict([str(c), attrs]
                                      for c, attrs in resolution.coordinate_to_attributes.items())]
     ])
   with safe_concurrent_creation(filename) as tmp_filename:
     # json.dump emits str, so the file must be opened in text mode;
     # 'wb' raises a TypeError under Python 3.
     with open(tmp_filename, 'w') as f:
       json.dump(res, f)
Exemplo n.º 18
0
 def _build_tool_pex(self, context, interpreter, pex_path,
                     requirements_lib):
     """Build a tool PEX at `pex_path` with `requirements_lib` installed and
     the tool subsystem's entry point set."""
     with safe_concurrent_creation(pex_path) as chroot:
         pex_builder = PexBuilderWrapper(
             PEXBuilder(path=chroot, interpreter=interpreter),
             PythonRepos.global_instance(), PythonSetup.global_instance(),
             context.log)
         pex_builder.add_requirement_libs_from(req_libs=[requirements_lib])
         pex_builder.set_entry_point(
             self._tool_subsystem().get_entry_point())
         pex_builder.freeze()
Exemplo n.º 19
0
 def _resolve_requirements_for_versioned_target_closure(self, interpreter, vt):
   """Return a PEX holding the requirement closure of `vt`, building it if absent."""
   cache_dir = os.path.join(self.workdir, str(interpreter.identity), vt.cache_key.hash)
   reqs_pex_path = os.path.realpath(cache_dir)
   if not os.path.isdir(reqs_pex_path):
     requirement_targets = [
       tgt for tgt in vt.target.closure() if has_python_requirements(tgt)
     ]
     with safe_concurrent_creation(reqs_pex_path) as staging_path:
       wrapper = PexBuilderWrapper.Factory.create(
         builder=PEXBuilder(staging_path, interpreter=interpreter, copy=True),
         log=self.context.log)
       wrapper.add_requirement_libs_from(requirement_targets)
       wrapper.freeze()
   return PEX(reqs_pex_path, interpreter=interpreter)
Exemplo n.º 20
0
    def test_safe_concurrent_creation_exception_still_renames(self):
        # Even when the body raises, the scratch dir is renamed into place:
        # afterwards the final path exists and the scratch path is gone.
        with temporary_dir() as td:
            final_path = os.path.join(td, 'expected_file')

            with self.assertRaises(ZeroDivisionError):
                with safe_concurrent_creation(final_path) as safe_path:
                    os.mkdir(safe_path)
                    self.assertTrue(os.path.exists(safe_path))
                    raise ZeroDivisionError('zomg')

            self.assertFalse(os.path.exists(safe_path))
            self.assertTrue(os.path.exists(final_path))
Exemplo n.º 21
0
 def write_resolve_report(self, frozen_resolve_file, partition_name):
   """Re-emit the frozen resolve for `partition_name` as a pretty-printed JSON report.

   :param frozen_resolve_file: Path of the frozen resolve JSON to read.
   :param partition_name: Basename (without extension) of the report to write.
   :raises TaskError: If reading the input or writing the report fails.
   """
   safe_mkdir(self.resolution_report_outdir)
   out_file = os.path.join(self.resolution_report_outdir, partition_name + '.json')
   try:
     with open(frozen_resolve_file) as fp:
       # Order of jars in the json matches transitive dep, so preserve in case we need the signal some day.
       parsed = json.load(fp, object_pairs_hook=OrderedDict)
       with safe_concurrent_creation(out_file) as tmp_filename:
         # json.dump emits str, so open the report in text mode; 'wb' raises
         # a TypeError under Python 3.
         with open(tmp_filename, 'w') as f:
           json.dump(parsed, f, indent=4)
   except IOError as e:
     raise TaskError('Failed to dump resolution report to {}: {}'.format(out_file, e))
Exemplo n.º 22
0
  def test_safe_concurrent_creation_exception_handling(self):
    # A raising body aborts the creation: neither the scratch path nor the
    # final file survives, and the original exception propagates.
    with temporary_dir() as td:
      destination = os.path.join(td, 'expected_file')

      with self.assertRaises(ZeroDivisionError):
        with safe_concurrent_creation(destination) as safe_path:
          os.mkdir(safe_path)
          self.assertTrue(os.path.exists(safe_path))
          raise ZeroDivisionError('zomg')

      self.assertFalse(os.path.exists(safe_path))
      self.assertFalse(os.path.exists(destination))
Exemplo n.º 23
0
 def _resolve_requirements_for_versioned_target_closure(self, interpreter, vt):
   """Build (or reuse) a requirements PEX for `vt`'s transitive python requirements.

   The result is cached under a path keyed on the interpreter identity and the
   versioned target's cache key hash.
   """
   reqs_pex_path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity),
                                                 vt.cache_key.hash))
   if not os.path.isdir(reqs_pex_path):
     req_libs = [t for t in vt.target.closure() if has_python_requirements(t)]
     with safe_concurrent_creation(reqs_pex_path) as safe_path:
       pex_builder = PexBuilderWrapper.Factory.create(
         builder=PEXBuilder(safe_path, interpreter=interpreter, copy=True),
         log=self.context.log)
       pex_builder.add_requirement_libs_from(req_libs)
       pex_builder.freeze()
   return PEX(reqs_pex_path, interpreter=interpreter)
Exemplo n.º 24
0
    def test_safe_concurrent_creation_exception_handling(self) -> None:
        # When the body raises, nothing is committed: both the scratch path
        # and the final file are absent afterwards.
        with temporary_dir() as td:
            target = os.path.join(td, "expected_file")

            with self.assertRaises(ZeroDivisionError):
                with safe_concurrent_creation(target) as safe_path:
                    os.mkdir(safe_path)
                    self.assertTrue(os.path.exists(safe_path))
                    raise ZeroDivisionError("zomg")

            self.assertFalse(os.path.exists(safe_path))
            self.assertFalse(os.path.exists(target))
Exemplo n.º 25
0
  def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH.

    The merged chroot and the list of merged pex paths are cached under a
    workdir path keyed on the interpreter identity and the target set hash.

    :param pex_info: Optional PexInfo used to seed the merged PEX.
    :returns: A WrappedPEX chaining the component pexes.
    """
    relevant_targets = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonRequirementLibrary, PythonTarget, Resources)))
    with self.invalidated(relevant_targets) as invalidation_check:

      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
      extra_pex_paths_file_path = path + '.extra_pex_paths'
      extra_pex_paths = None

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        pexes = [
          self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
          self.context.products.get_data(GatherSources.PYTHON_SOURCES)
        ]

        if self.extra_requirements():
          extra_reqs = [PythonRequirement(req_str) for req_str in self.extra_requirements()]
          addr = Address.parse('{}_extra_reqs'.format(self.__class__.__name__))
          self.context.build_graph.inject_synthetic_target(
            addr, PythonRequirementLibrary, requirements=extra_reqs)
          # Add the extra requirements first, so they take precedence over any colliding version
          # in the target set's dependency closure.
          pexes = [self.resolve_requirements([self.context.build_graph.get_target(addr)])] + pexes

        extra_pex_paths = [pex.path() for pex in pexes if pex]

        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
          builder.freeze()

        # The file is opened in text mode, so str newlines must be written;
        # writing the bytes literal b'\n' raises a TypeError on Python 3.
        with open(extra_pex_paths_file_path, 'w') as outfile:
          for epp in extra_pex_paths:
            outfile.write(epp)
            outfile.write('\n')

    if extra_pex_paths is None:
      # Cache hit: recover the merged pex paths recorded by a previous run.
      with open(extra_pex_paths_file_path, 'r') as infile:
        extra_pex_paths = [p.strip() for p in infile.readlines()]
    return WrappedPEX(PEX(os.path.realpath(path), interpreter), extra_pex_paths, interpreter)
Exemplo n.º 26
0
  def create_pex(self, pex_info=None):
    """Returns a wrapped pex that "merges" the other pexes via PEX_PATH.

    The merged chroot and the list of merged pex paths are cached under a
    workdir path keyed on the interpreter identity and the target set hash.

    :param pex_info: Optional PexInfo used to seed the merged PEX.
    :returns: A WrappedPEX chaining the component pexes.
    """
    relevant_targets = self.context.targets(
      lambda tgt: isinstance(tgt, (PythonRequirementLibrary, PythonTarget, Resources)))
    with self.invalidated(relevant_targets) as invalidation_check:

      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
          invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.join(self.workdir, str(interpreter.identity), target_set_id)
      extra_pex_paths_file_path = path + '.extra_pex_paths'
      extra_pex_paths = None

      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        pexes = [
          self.context.products.get_data(ResolveRequirements.REQUIREMENTS_PEX),
          self.context.products.get_data(GatherSources.PYTHON_SOURCES)
        ]

        if self.extra_requirements():
          extra_reqs = [PythonRequirement(req_str) for req_str in self.extra_requirements()]
          addr = Address.parse('{}_extra_reqs'.format(self.__class__.__name__))
          self.context.build_graph.inject_synthetic_target(
            addr, PythonRequirementLibrary, requirements=extra_reqs)
          # Add the extra requirements first, so they take precedence over any colliding version
          # in the target set's dependency closure.
          pexes = [self.resolve_requirements([self.context.build_graph.get_target(addr)])] + pexes

        extra_pex_paths = [pex.path() for pex in pexes if pex]

        with safe_concurrent_creation(path) as safe_path:
          builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
          builder.freeze()

        # The file is opened in text mode, so str newlines must be written;
        # writing the bytes literal b'\n' raises a TypeError on Python 3.
        with open(extra_pex_paths_file_path, 'w') as outfile:
          for epp in extra_pex_paths:
            outfile.write(epp)
            outfile.write('\n')

    if extra_pex_paths is None:
      # Cache hit: recover the merged pex paths recorded by a previous run.
      with open(extra_pex_paths_file_path, 'r') as infile:
        extra_pex_paths = [p.strip() for p in infile.readlines()]
    return WrappedPEX(PEX(os.path.realpath(path), interpreter), extra_pex_paths, interpreter)
Exemplo n.º 27
0
  def checker_pex(self, interpreter):
    """Build (or reuse) the style-checker PEX for `interpreter` and return it.

    In PANTS_DEV mode the checker is built from local sources; otherwise it is
    satisfied from the running environment or resolved from a distribution repo.
    """
    # TODO(John Sirois): Formalize in pants.base?
    pants_dev_mode = os.environ.get('PANTS_DEV')

    if pants_dev_mode:
      checker_id = self.checker_target.transitive_invalidation_hash()
    else:
      checker_id = hash_all([self._CHECKER_REQ])

    # The cache key covers both the checker contents and the interpreter.
    pex_path = os.path.join(self.workdir, 'checker', checker_id, str(interpreter.identity))

    if not os.path.exists(pex_path):
      with self.context.new_workunit(name='build-checker'):
        with safe_concurrent_creation(pex_path) as chroot:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=chroot, interpreter=interpreter),
            log=self.context.log)

          # Constraining is required to guard against the case where the user
          # has a pexrc file set.
          pex_builder.add_interpreter_constraint(str(interpreter.identity.requirement))

          if pants_dev_mode:
            # Dev mode: build the checker from its local sources plus the
            # requirement libraries in its closure.
            pex_builder.add_sources_from(self.checker_target)
            req_libs = [tgt for tgt in self.checker_target.closure()
                        if isinstance(tgt, PythonRequirementLibrary)]

            pex_builder.add_requirement_libs_from(req_libs=req_libs)
          else:
            try:
              # The checker is already on sys.path, eg: embedded in pants.pex.
              platform = Platform.current()
              platform_name = platform.platform
              env = Environment(search_path=sys.path,
                                platform=platform_name,
                                python=interpreter.version_string)
              working_set = WorkingSet(entries=sys.path)
              for dist in working_set.resolve([Requirement.parse(self._CHECKER_REQ)], env=env):
                pex_builder.add_direct_requirements(dist.requires())
                # NB: We add the dist location instead of the dist itself to make sure its a
                # distribution style pex knows how to package.
                pex_builder.add_dist_location(dist.location)
              pex_builder.add_direct_requirements([self._CHECKER_REQ])
            except (DistributionNotFound, PEXBuilder.InvalidDistribution):
              # We need to resolve the checker from a local or remote distribution repo.
              pex_builder.add_resolved_requirements(
                [PythonRequirement(self._CHECKER_REQ)])

          pex_builder.set_entry_point(self._CHECKER_ENTRYPOINT)
          pex_builder.freeze()

    return PEX(pex_path, interpreter=interpreter)
Exemplo n.º 28
0
  def merge_pexes(cls, path, pex_info, interpreter, pexes, interpeter_constraints=None):
    """Generate a merged pex at `path` that chains the given pexes via PEX_PATH.

    NOTE(review): the `interpeter_constraints` parameter name is misspelled,
    but renaming it would break keyword callers, so it is kept as-is.
    """
    component_paths = [component.path() for component in pexes if component]
    if component_paths:
      # Copy before mutating so the caller's PexInfo is left untouched.
      pex_info = pex_info.copy()
      pex_info.merge_pex_path(':'.join(component_paths))

    with safe_concurrent_creation(path) as staging_path:
      merged_builder = PEXBuilder(staging_path, interpreter, pex_info=pex_info)
      for constraint in (interpeter_constraints or ()):
        merged_builder.add_interpreter_constraint(constraint)
      merged_builder.freeze()
Exemplo n.º 29
0
 def _build_tool_pex(self, tool_subsystem, interpreter, pex_path):
     """Build a PEX for `tool_subsystem` at `pex_path`, resolving its
     requirement specs for the current platform only."""
     with safe_concurrent_creation(pex_path) as chroot:
         pex_builder = PexBuilderWrapper.Factory.create(
             builder=PEXBuilder(path=chroot, interpreter=interpreter),
             log=self.context.log)
         reqs = [
             PythonRequirement(r)
             for r in tool_subsystem.get_requirement_specs()
         ]
         pex_builder.add_resolved_requirements(reqs=reqs,
                                               platforms=["current"])
         pex_builder.set_entry_point(tool_subsystem.get_entry_point())
         pex_builder.freeze()
Exemplo n.º 30
0
  def merge_pexes(cls, path, pex_info, interpreter, pexes, interpeter_constraints=None):
    """Generates a merged pex at path.

    Chains the given pexes together via PEX_PATH on a copy of `pex_info`.
    NOTE(review): `interpeter_constraints` is misspelled, but renaming it would
    break keyword callers, so it is left as-is.
    """
    pex_paths = [pex.path() for pex in pexes if pex]
    if pex_paths:
      # Copy before mutating so the caller's PexInfo is left untouched.
      pex_info = pex_info.copy()
      pex_info.merge_pex_path(':'.join(pex_paths))

    with safe_concurrent_creation(path) as safe_path:
      builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
      if interpeter_constraints:
        for constraint in interpeter_constraints:
          builder.add_interpreter_constraint(constraint)
      builder.freeze()
Exemplo n.º 31
0
 def bootstrap_conan(self):
   """Build (or reuse) a PEX of the conan tool and return it wrapped as a ConanBinary.

   The PEX is cached under the pants cache dir, so the resolve of the conan
   requirements happens at most once per cache.
   """
   pex_info = PexInfo.default()
   pex_info.entry_point = 'conans.conan'
   conan_bootstrap_dir = os.path.join(get_pants_cachedir(), 'conan_support')
   conan_pex_path = os.path.join(conan_bootstrap_dir, 'conan_binary')
   interpreter = PythonInterpreter.get()
   if not os.path.exists(conan_pex_path):
     with safe_concurrent_creation(conan_pex_path) as safe_path:
       builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
       reqs = [PythonRequirement(req) for req in self.get_options().conan_requirements]
       dump_requirements(builder, interpreter, reqs, logger)
       builder.freeze()
   conan_binary = PEX(conan_pex_path, interpreter)
   return self.ConanBinary(pex=conan_binary)
Exemplo n.º 32
0
  def _fetch_binary(self, name, binary_path):
    """Download `binary_path` into the bootstrap dir (at most once) and return its local path.

    :param name: Logical binary name, used for logging and stream selection.
    :param binary_path: Relative path of the binary within the bootstrap dir.
    :returns: Absolute path of the bootstrapped, executable binary.
    """
    bootstrap_dir = os.path.realpath(os.path.expanduser(self._pants_bootstrapdir))
    bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)
    if not os.path.exists(bootstrapped_binary_path):
      with safe_concurrent_creation(bootstrapped_binary_path) as downloadpath:
        with self._select_binary_stream(name, binary_path) as stream:
          with safe_open(downloadpath, 'wb') as bootstrapped_binary:
            bootstrapped_binary.write(stream())
          # NOTE(review): this manual rename pre-empts the rename
          # safe_concurrent_creation performs on exit; presumably the context
          # manager tolerates the scratch path already being gone — confirm.
          os.rename(downloadpath, bootstrapped_binary_path)
          chmod_plus_x(bootstrapped_binary_path)

    logger.debug('Selected {binary} binary bootstrapped to: {path}'
                 .format(binary=name, path=bootstrapped_binary_path))
    return bootstrapped_binary_path
Exemplo n.º 33
0
 def _get_pex_for_versioned_targets(self, interpreter, versioned_targets):
   """Build (or reuse) a sources PEX for the given versioned targets.

   :param interpreter: Interpreter every target is known to be compatible with.
   :param versioned_targets: Non-empty list of versioned targets to include.
   :raises TaskError: If `versioned_targets` is empty.
   :returns: A PEX containing the targets' sources.
   """
   if versioned_targets:
     target_set_id = VersionedTargetSet.from_versioned_targets(versioned_targets).cache_key.hash
   else:
     raise TaskError("Can't create pex in gather_sources: No python targets provided")
   source_pex_path = os.path.realpath(os.path.join(self.workdir, target_set_id))
   # Note that we check for the existence of the directory, instead of for invalid_vts,
   # to cover the empty case.
   if not os.path.isdir(source_pex_path):
     # Note that we use the same interpreter for all targets: We know the interpreter
     # is compatible (since it's compatible with all targets in play).
     with safe_concurrent_creation(source_pex_path) as safe_path:
       self._build_pex(interpreter, safe_path, [vt.target for vt in versioned_targets])
   return PEX(source_pex_path, interpreter=interpreter)
Exemplo n.º 34
0
  def _fetch_binary(self, name, binary_path):
    """Download `binary_path` into the bootstrap dir (at most once) and return its local path.

    :param name: Logical binary name, used for logging and stream selection.
    :param binary_path: Relative path of the binary within the bootstrap dir.
    :returns: Absolute path of the bootstrapped, executable binary.
    """
    bootstrap_dir = os.path.realpath(os.path.expanduser(self._pants_bootstrapdir))
    bootstrapped_binary_path = os.path.join(bootstrap_dir, binary_path)
    if not os.path.exists(bootstrapped_binary_path):
      with safe_concurrent_creation(bootstrapped_binary_path) as downloadpath:
        with self._select_binary_stream(name, binary_path) as stream:
          with safe_open(downloadpath, 'wb') as bootstrapped_binary:
            bootstrapped_binary.write(stream())
          # NOTE(review): this manual rename pre-empts the rename
          # safe_concurrent_creation performs on exit; presumably the context
          # manager tolerates the scratch path already being gone — confirm.
          os.rename(downloadpath, bootstrapped_binary_path)
          chmod_plus_x(bootstrapped_binary_path)

    logger.debug('Selected {binary} binary bootstrapped to: {path}'
                 .format(binary=name, path=bootstrapped_binary_path))
    return bootstrapped_binary_path
Exemplo n.º 35
0
    def _get_matching_wheel(self, pex_path, interpreter, requirements,
                            module_name):
        """Use PexBuilderWrapper to resolve a single wheel from the requirement specs using pex.

        N.B.: The resolved wheel is already "unpacked" by PEX. More accurately, it's installed in a
        chroot.
        """
        with self.context.new_workunit("extract-native-wheels"):
            with safe_concurrent_creation(pex_path) as chroot:
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(path=chroot, interpreter=interpreter),
                    log=self.context.log)

                # Unlike the sibling variants in this file, the module name is
                # passed via the `dist_key` keyword here.
                return pex_builder.extract_single_dist_for_current_platform(
                    requirements, dist_key=module_name)
Exemplo n.º 36
0
  def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
    """Ensure an executable PEX exists at `pex_file_path` and return it.

    An already-executable file at the path is treated as a cache hit and reused.

    :param interpreter: The PythonInterpreter to resolve and build against.
    :param pex_file_path: Path at which the bootstrapped PEX file is cached.
    :param extra_reqs: Optional extra requirements added on top of `self.base_requirements`.
    :returns: A :class:`PEX` wrapping the file at `pex_file_path`.
    """
    if is_executable(pex_file_path):
      return PEX(pex_file_path, interpreter)

    pex_info = PexInfo.default(interpreter=interpreter)
    if self.entry_point is not None:
      pex_info.entry_point = self.entry_point

    with safe_concurrent_creation(pex_file_path) as safe_path:
      requirements = list(self.base_requirements)
      requirements.extend(extra_reqs or [])
      wrapper = PexBuilderWrapper.Factory.create(
        builder=PEXBuilder(interpreter=interpreter, pex_info=pex_info))
      wrapper.add_resolved_requirements(requirements, platforms=['current'])
      wrapper.build(safe_path)

    return PEX(pex_file_path, interpreter)
Exemplo n.º 37
0
  def _map_fetched_remote_source(self, go_remote_lib, gopath, all_known_remote_libs,
                                 resolved_remote_libs, undeclared_deps, import_root_map):
    """Map a fetched remote Go library's import paths onto (possibly synthetic) targets.

    :param go_remote_lib: The remote library whose fetched sources are being mapped.
    :param gopath: The GOPATH the library was fetched into; the import-path cache file
      lives next to it.
    :param all_known_remote_libs: Mutable set of every remote lib target seen so far;
      newly resolved libs are added to it.
    :param resolved_remote_libs: Mutable set that newly resolved libs are added to.
    :param undeclared_deps: Mutable mapping from lib to the (import_path, address) pairs
      that could not be resolved.
    :param import_root_map: Mutable memo mapping remote import path -> remote root.
    """
    # See if we've computed the remote import paths for this rev of this lib in a previous run.
    remote_import_paths_cache = os.path.join(os.path.dirname(gopath), 'remote_import_paths.txt')
    if os.path.exists(remote_import_paths_cache):
      with open(remote_import_paths_cache, 'r') as fp:
        remote_import_paths = [line.strip() for line in fp.readlines()]
    else:
      remote_import_paths = self._get_remote_import_paths(go_remote_lib.import_path,
                                                          gopath=gopath)
      # Persist the computed paths atomically so concurrent runs never see a partial file.
      with safe_concurrent_creation(remote_import_paths_cache) as safe_path:
        with open(safe_path, 'w') as fp:
          for path in remote_import_paths:
            fp.write(f'{path}\n')

    for remote_import_path in remote_import_paths:
      remote_root = import_root_map.get(remote_import_path)
      if remote_root is None:
        # Memoize fetcher lookups across import paths within (and across) calls.
        fetcher = self._get_fetcher(remote_import_path)
        remote_root = fetcher.root()
        import_root_map[remote_import_path] = remote_root

      spec_path = os.path.join(go_remote_lib.target_base, remote_root)

      package_path = GoRemoteLibrary.remote_package_path(remote_root, remote_import_path)
      target_name = package_path or os.path.basename(remote_root)

      address = Address(spec_path, target_name)
      if not any(address == lib.address for lib in all_known_remote_libs):
        try:
          # If we've already resolved a package from this remote root, its ok to define an
          # implicit synthetic remote target for all other packages in the same remote root.
          same_remote_libs = [lib for lib in all_known_remote_libs
                              if spec_path == lib.address.spec_path]
          implicit_ok = any(same_remote_libs)

          # If we're creating a synthetic remote target, we should pin it to the same
          # revision as the rest of the library.
          rev = None
          if implicit_ok:
            rev = same_remote_libs[0].rev

          remote_lib = self._resolve(go_remote_lib, address, package_path, rev, implicit_ok)
          resolved_remote_libs.add(remote_lib)
          all_known_remote_libs.add(remote_lib)
        except self.UndeclaredRemoteLibError as e:
          undeclared_deps[go_remote_lib].add((remote_import_path, e.address))
      # The dependency edge is injected even when resolution failed above, so the
      # build graph still records the attempted import.
      self.context.build_graph.inject_dependency(go_remote_lib.address, address)
Exemplo n.º 38
0
  def _map_fetched_remote_source(self, go_remote_lib, gopath, all_known_remote_libs,
                                 resolved_remote_libs, undeclared_deps, import_root_map):
    """Map a fetched remote Go library's import paths onto (possibly synthetic) targets.

    :param go_remote_lib: The remote library whose fetched sources are being mapped.
    :param gopath: The GOPATH the library was fetched into; the import-path cache file
      lives next to it.
    :param all_known_remote_libs: Mutable set of every remote lib target seen so far;
      newly resolved libs are added to it.
    :param resolved_remote_libs: Mutable set that newly resolved libs are added to.
    :param undeclared_deps: Mutable mapping from lib to the (import_path, address) pairs
      that could not be resolved.
    :param import_root_map: Mutable memo mapping remote import path -> remote root.
    """
    # See if we've computed the remote import paths for this rev of this lib in a previous run.
    remote_import_paths_cache = os.path.join(os.path.dirname(gopath), 'remote_import_paths.txt')
    if os.path.exists(remote_import_paths_cache):
      # BUG FIX: the file is opened in text mode, so lines are already str;
      # calling .decode('utf8') on them raised AttributeError under Python 3.
      with open(remote_import_paths_cache, 'r') as fp:
        remote_import_paths = [line.strip() for line in fp.readlines()]
    else:
      remote_import_paths = self._get_remote_import_paths(go_remote_lib.import_path,
                                                          gopath=gopath)
      with safe_concurrent_creation(remote_import_paths_cache) as safe_path:
        # BUG FIX: write str to the text-mode file; the previous .encode('utf8')
        # produced bytes, which a 'w'-mode file rejects under Python 3.
        with open(safe_path, 'w') as fp:
          for path in remote_import_paths:
            fp.write('{}\n'.format(path))

    for remote_import_path in remote_import_paths:
      remote_root = import_root_map.get(remote_import_path)
      if remote_root is None:
        # Memoize fetcher lookups across import paths within (and across) calls.
        fetcher = self._get_fetcher(remote_import_path)
        remote_root = fetcher.root()
        import_root_map[remote_import_path] = remote_root

      spec_path = os.path.join(go_remote_lib.target_base, remote_root)

      package_path = GoRemoteLibrary.remote_package_path(remote_root, remote_import_path)
      target_name = package_path or os.path.basename(remote_root)

      address = Address(spec_path, target_name)
      if not any(address == lib.address for lib in all_known_remote_libs):
        try:
          # If we've already resolved a package from this remote root, its ok to define an
          # implicit synthetic remote target for all other packages in the same remote root.
          same_remote_libs = [lib for lib in all_known_remote_libs
                              if spec_path == lib.address.spec_path]
          implicit_ok = any(same_remote_libs)

          # If we're creating a synthetic remote target, we should pin it to the same
          # revision as the rest of the library.
          rev = None
          if implicit_ok:
            rev = same_remote_libs[0].rev

          remote_lib = self._resolve(go_remote_lib, address, package_path, rev, implicit_ok)
          resolved_remote_libs.add(remote_lib)
          all_known_remote_libs.add(remote_lib)
        except self.UndeclaredRemoteLibError as e:
          undeclared_deps[go_remote_lib].add((remote_import_path, e.address))
      self.context.build_graph.inject_dependency(go_remote_lib.address, address)
Exemplo n.º 39
0
  def merged_pex(cls, path, pex_info, interpreter, pexes, interpeter_constraints=None):
    """Yields a pex builder at path with the given pexes already merged.

    NOTE: the `interpeter_constraints` spelling is kept as-is for caller compatibility.

    :rtype: :class:`pex.pex_builder.PEXBuilder`
    """
    merged_paths = [pex.path() for pex in pexes if pex]
    if merged_paths:
      # Copy before mutating so the caller's pex_info is left untouched.
      pex_info = pex_info.copy()
      pex_info.merge_pex_path(':'.join(merged_paths))

    with safe_concurrent_creation(path) as safe_path:
      pex_builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
      for constraint in (interpeter_constraints or ()):
        pex_builder.add_interpreter_constraint(constraint)
      yield pex_builder
Exemplo n.º 40
0
  def write_resolve_report(self, frozen_resolve_file, partition_name):
    """Dump a stable, alphabetized JSON resolution report for the given partition.

    :param frozen_resolve_file: Path to the frozen resolve JSON file to summarize.
    :param partition_name: Basename (without extension) for the output report file.
    :raises TaskError: If the frozen resolve file cannot be read.
    """
    safe_mkdir(self.resolution_report_outdir)
    out_file = os.path.join(self.resolution_report_outdir, partition_name + '.json')
    try:
      with open(frozen_resolve_file) as fp:
        # We are alphabetizing the 3rdparty names and their resolved coordinates to get a
        # stable diff in the SCM.
        parsed = json.load(fp, object_pairs_hook=OrderedDict)

        for target, coords in parsed['default']['target_to_coords'].items():
          parsed['default']['target_to_coords'][target] = sorted(coords)
        parsed = OrderedDict(sorted(parsed['default']['target_to_coords'].items()))

        with safe_concurrent_creation(out_file) as tmp_filename:
          # BUG FIX: json.dump emits str, so the temp file must be opened in text
          # mode; the previous 'wb' raised a TypeError under Python 3.
          with open(tmp_filename, 'w') as f:
            json.dump(parsed, f, indent=4)
    except IOError as e:
      raise TaskError('Failed to dump resolution report to {}: {}'.format(out_file, e))
Exemplo n.º 41
0
 def _get_pex_for_versioned_targets(self, interpreter, versioned_targets):
   """Build (or reuse) a PEX covering the given versioned targets.

   The PEX directory name is derived from the combined cache key, so an existing
   directory means the PEX was already built and can be reused as-is.
   """
   if not versioned_targets:
     # With no relevant targets we still build an empty PEX, so downstream tasks
     # need not special-case it.
     target_set_id = 'no_targets'
   else:
     target_set_id = VersionedTargetSet.from_versioned_targets(versioned_targets).cache_key.hash
   source_pex_path = os.path.realpath(os.path.join(self.workdir, target_set_id))
   # Checking the directory (rather than invalid_vts) also covers the empty case.
   if not os.path.isdir(source_pex_path):
     # One interpreter suffices for every target here: it is compatible with all
     # targets in play.
     with safe_concurrent_creation(source_pex_path) as chroot:
       self._build_pex(interpreter, chroot, [vt.target for vt in versioned_targets])
   return PEX(source_pex_path, interpreter=interpreter)
Exemplo n.º 42
0
 def _get_pex_for_versioned_targets(self, interpreter, versioned_targets):
   """Return a source PEX for `versioned_targets`, building it only if missing on disk.

   :param interpreter: The PythonInterpreter to build/load the PEX against.
   :param versioned_targets: Versioned targets whose combined cache key names the PEX dir.
   :returns: A :class:`PEX` rooted at the (possibly newly created) PEX directory.
   """
   if versioned_targets:
     target_set_id = VersionedTargetSet.from_versioned_targets(versioned_targets).cache_key.hash
   else:
     # If there are no relevant targets, we still go through the motions of gathering
     # an empty set of sources, to prevent downstream tasks from having to check
     # for this special case.
     target_set_id = 'no_targets'
   source_pex_path = os.path.realpath(os.path.join(self.workdir, target_set_id))
   # Note that we check for the existence of the directory, instead of for invalid_vts,
   # to cover the empty case.
   if not os.path.isdir(source_pex_path):
     # Note that we use the same interpreter for all targets: We know the interpreter
     # is compatible (since it's compatible with all targets in play).
     with safe_concurrent_creation(source_pex_path) as safe_path:
       self._build_pex(interpreter, safe_path, [vt.target for vt in versioned_targets])
   return PEX(source_pex_path, interpreter=interpreter)
    def resolve_requirements(self, interpreter, req_libs):
        """Requirements resolution for PEX files.

        NB: Resolution here is always for the 'current' platform. Tasks such as
        PythonBinaryCreate, which export code meant to run on other machines,
        validate and resolve their own platforms instead of using this method.

        :param interpreter: Resolve against this :class:`PythonInterpreter`.
        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :returns: a PEX containing target requirements and any specified python dist targets.
        """
        with self.invalidated(req_libs) as invalidation_check:
            all_vts = invalidation_check.all_vts
            # Even with no relevant targets we resolve an empty requirement set,
            # so downstream tasks need not special-case it.
            if all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    all_vts).cache_key.hash
            else:
                target_set_id = "no_targets"

            # Resolving for 'current' pulls in any binary or universal dists
            # needed for the currently executing host.
            platforms = ["current"]

            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))
            # The directory check (rather than invalid_vts) also covers the empty case.
            if not os.path.isdir(path):
                with safe_concurrent_creation(path) as safe_path:
                    wrapper = PexBuilderWrapper.Factory.create(
                        builder=PEXBuilder(path=safe_path,
                                           interpreter=interpreter,
                                           copy=True),
                        log=self.context.log,
                    )
                    wrapper.add_requirement_libs_from(req_libs,
                                                      platforms=platforms)
                    wrapper.freeze()
        return PEX(path, interpreter=interpreter)
Exemplo n.º 44
0
  def resolve_requirement_strings(self, interpreter, requirement_strings):
    """Resolve a list of pip-style requirement strings into a cached PEX.

    The PEX directory is keyed by the (sorted) requirement strings, so repeated
    calls with the same requirements reuse the existing resolve.
    """
    requirement_strings = sorted(requirement_strings)
    if not requirement_strings:
      req_strings_id = 'no_requirements'
    elif len(requirement_strings) == 1:
      req_strings_id = requirement_strings[0]
    else:
      req_strings_id = hash_all(requirement_strings)

    path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), req_strings_id))
    if not os.path.isdir(path):
      requirements = [PythonRequirement(req_str) for req_str in requirement_strings]
      with safe_concurrent_creation(path) as chroot:
        pex_builder = PEXBuilder(path=chroot, interpreter=interpreter, copy=True)
        dump_requirements(pex_builder, interpreter, requirements, self.context.log)
        pex_builder.freeze()
    return PEX(path, interpreter=interpreter)
Exemplo n.º 45
0
    def bootstrap(self, interpreter, pex_file_path, extra_reqs=None):
        """Bootstrap an executable PEX at `pex_file_path` and return it.

        :param interpreter: The PythonInterpreter to resolve and build against.
        :param pex_file_path: Path at which the bootstrapped PEX file is cached.
        :param extra_reqs: Optional extra requirements added on top of
          `self.base_requirements`.
        :returns: A :class:`PEX` wrapping the (possibly pre-existing) file.
        """
        # Caching is done just by checking if the file at the specified path is already executable.
        if not is_executable(pex_file_path):
            pex_info = PexInfo.default(interpreter=interpreter)
            if self.entry_point is not None:
                pex_info.entry_point = self.entry_point

            with safe_concurrent_creation(pex_file_path) as safe_path:
                all_reqs = list(self.base_requirements) + list(extra_reqs
                                                               or [])
                pex_builder = PexBuilderWrapper.Factory.create(
                    builder=PEXBuilder(interpreter=interpreter,
                                       pex_info=pex_info))
                pex_builder.add_resolved_requirements(all_reqs,
                                                      platforms=['current'])
                pex_builder.build(safe_path)

        return PEX(pex_file_path, interpreter)
Exemplo n.º 46
0
 def bootstrap_conan(self):
     """Bootstrap (once) and return a conan binary PEX for running conan commands."""
     pex_info = PexInfo.default()
     pex_info.entry_point = 'conans.conan'
     conan_bootstrap_dir = os.path.join(get_pants_cachedir(), 'conan_support')
     conan_pex_path = os.path.join(conan_bootstrap_dir, 'conan_binary')
     interpreter = PythonInterpreter.get()
     # The existing file doubles as the cache marker.
     if not os.path.exists(conan_pex_path):
         with safe_concurrent_creation(conan_pex_path) as safe_path:
             pex_builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
             requirements = [PythonRequirement(req)
                             for req in self.get_options().conan_requirements]
             dump_requirements(pex_builder, interpreter, requirements, logger)
             pex_builder.freeze()
     return self.ConanBinary(pex=PEX(conan_pex_path, interpreter))
Exemplo n.º 47
0
    def resolve_requirements(self, interpreter, req_libs):
        """Requirements resolution for PEX files.

        :param interpreter: Resolve against this :class:`PythonInterpreter`.
        :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
        :returns: a PEX containing target requirements and any specified python dist targets.
        """
        with self.invalidated(req_libs) as invalidation_check:
            # If there are no relevant targets, we still go through the motions of resolving
            # an empty set of requirements, to prevent downstream tasks from having to check
            # for this special case.
            if invalidation_check.all_vts:
                target_set_id = VersionedTargetSet.from_versioned_targets(
                    invalidation_check.all_vts).cache_key.hash
            else:
                target_set_id = 'no_targets'

            # We need to ensure that we are resolving for only the current platform if we are
            # including local python dist targets that have native extensions.
            tgts = self.context.targets()
            if self._python_native_code_settings.check_build_for_current_platform_only(
                    tgts):
                maybe_platforms = ['current']
            else:
                maybe_platforms = None

            path = os.path.realpath(
                os.path.join(self.workdir, str(interpreter.identity),
                             target_set_id))
            # Note that we check for the existence of the directory, instead of for invalid_vts,
            # to cover the empty case.
            if not os.path.isdir(path):
                with safe_concurrent_creation(path) as safe_path:
                    builder = PEXBuilder(path=safe_path,
                                         interpreter=interpreter,
                                         copy=True)
                    dump_requirement_libs(builder,
                                          interpreter,
                                          req_libs,
                                          self.context.log,
                                          platforms=maybe_platforms)
                    builder.freeze()
        # `path` is bound inside the `with` above; it remains valid here.
        return PEX(path, interpreter=interpreter)
Exemplo n.º 48
0
  def do_resolve(cls, executor, extra_args, ivyxml, jvm_options, workdir_report_paths_by_conf,
                 confs, ivy_cache_dir, ivy_cache_classpath_filename, resolve_hash_name,
                 workunit_factory, workunit_name):
    """Execute Ivy with the given ivy.xml and copies all relevant files into the workdir.

    This method does an Ivy resolve, which may be either a Pants resolve or a Pants fetch depending
    on whether there is an existing frozen resolution.

    After it is run, the Ivy reports are copied into the workdir at the paths specified by
    workdir_report_paths_by_conf along with a file containing a list of all the requested artifacts
    and their transitive dependencies.

    :param executor: A JVM executor to use to invoke ivy.
    :param extra_args: Extra arguments to pass to ivy.
    :param ivyxml: The input ivy.xml containing the dependencies to resolve.
    :param jvm_options: A list of jvm option strings to use for the ivy invoke, or None.
    :param workdir_report_paths_by_conf: A dict mapping confs to report paths in the workdir.
    :param confs: The confs used in the resolve.
    :param ivy_cache_dir: The Ivy cache dir from which report files are copied.
    :param ivy_cache_classpath_filename: The path where the resolved classpath file is written.
    :param resolve_hash_name: The hash to use as the module name for finding the ivy report file.
    :param workunit_factory: A workunit factory for the ivy invoke, or None.
    :param workunit_name: A workunit name for the ivy invoke, or None.
    """
    ivy = Bootstrapper.default_ivy(bootstrap_workunit_factory=workunit_factory)

    # Ivy writes the classpath into a scratch path; safe_concurrent_creation presumably
    # publishes it to ivy_cache_classpath_filename on exit (see the debug log below).
    with safe_concurrent_creation(ivy_cache_classpath_filename) as raw_target_classpath_file_tmp:
      extra_args = extra_args or []
      args = ['-cachepath', raw_target_classpath_file_tmp] + extra_args

      with cls._ivy_lock:
        cls._exec_ivy(ivy, confs, ivyxml, args,
                      jvm_options=jvm_options,
                      executor=executor,
                      workunit_name=workunit_name,
                      workunit_factory=workunit_factory)

      if not os.path.exists(raw_target_classpath_file_tmp):
        raise cls.IvyError('Ivy failed to create classpath file at {}'
                           .format(raw_target_classpath_file_tmp))

      cls._copy_ivy_reports(workdir_report_paths_by_conf, confs, ivy_cache_dir, resolve_hash_name)

    logger.debug('Moved ivy classfile file to {dest}'
                 .format(dest=ivy_cache_classpath_filename))
  def resolve_requirements(self, req_libs):
    """Resolve requirement library targets into a cached requirements PEX.

    The resolve is keyed by the combined cache key of `req_libs`; an existing
    directory at the keyed path is reused without re-resolving.
    """
    with self.invalidated(req_libs) as invalidation_check:
      vts = invalidation_check.all_vts
      # Even with no relevant targets we resolve an empty requirement set, so
      # downstream tasks need not special-case it.
      if vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      interpreter = self.context.products.get_data(PythonInterpreter)
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))

      # Checking the directory (rather than invalid_vts) also covers the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          self._build_requirements_pex(interpreter, safe_path, req_libs)
    return PEX(path, interpreter=interpreter)
Exemplo n.º 50
0
  def _do_resolve(self, confs, executor, extra_args, global_vts, pinned_artifacts,
                       raw_target_classpath_file, resolve_hash_name, resolve_workdir,
                       workunit_name):
    """Run a full Ivy resolve for `global_vts` and materialize its outputs in the workdir.

    Generates an ivy.xml for the targets, invokes Ivy under the shared lock, copies the
    per-conf resolve reports into `resolve_workdir`, and writes the resolved classpath
    to `raw_target_classpath_file`.

    :raises Error: If ivy.xml generation fails or Ivy does not produce a classpath file.
    """
    safe_mkdir(resolve_workdir)
    ivy = Bootstrapper.default_ivy(bootstrap_workunit_factory=self.context.new_workunit)

    # Ivy writes into a scratch path which safe_concurrent_creation presumably publishes
    # to raw_target_classpath_file on exit (see the debug log below).
    with safe_concurrent_creation(raw_target_classpath_file) as raw_target_classpath_file_tmp:
      args = ['-cachepath', raw_target_classpath_file_tmp] + extra_args

      targets = global_vts.targets
      # TODO(John Sirois): merge the code below into IvyUtils or up here; either way, better
      # diagnostics can be had in `IvyUtils.generate_ivy` if this is done.
      # See: https://github.com/pantsbuild/pants/issues/2239
      jars, global_excludes = IvyUtils.calculate_classpath(targets)

      # Don't pass global excludes to ivy when using soft excludes.
      if self.get_options().soft_excludes:
        global_excludes = []

      ivyxml = self._ivy_xml_path(resolve_workdir)
      with IvyUtils.ivy_lock:
        try:
          IvyUtils.generate_ivy(targets, jars, global_excludes, ivyxml, confs,
                                resolve_hash_name, pinned_artifacts)
        except IvyUtils.IvyError as e:
          raise self.Error('Failed to prepare ivy resolve: {}'.format(e))

        self._exec_ivy(ivy, executor, confs, ivyxml, args, workunit_name)

        # Copy ivy resolve file into resolve workdir.
        for conf in confs:
          atomic_copy(IvyUtils.xml_report_path(self.ivy_cache_dir, resolve_hash_name, conf),
                      self._resolve_report_path(resolve_workdir, conf))

      if not os.path.exists(raw_target_classpath_file_tmp):
        raise self.Error('Ivy failed to create classpath file at {}'
                         .format(raw_target_classpath_file_tmp))

    logger.debug('Moved ivy classfile file to {dest}'.format(dest=raw_target_classpath_file))
Exemplo n.º 51
0
  def resolve_requirements(self, interpreter, req_libs):
    """Requirements resolution for PEX files.

    NB: This method always resolve all requirements in `req_libs` for the 'current' platform! Tasks
    such as PythonBinaryCreate which export code meant for other machines to run will need to
    resolve against the platforms specified by the target or via pants options.

    :param interpreter: Resolve against this :class:`PythonInterpreter`.
    :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve.
    :returns: a PEX containing target requirements and any specified python dist targets.
    """
    with self.invalidated(req_libs) as invalidation_check:
      # If there are no relevant targets, we still go through the motions of resolving
      # an empty set of requirements, to prevent downstream tasks from having to check
      # for this special case.
      if invalidation_check.all_vts:
        target_set_id = VersionedTargetSet.from_versioned_targets(
            invalidation_check.all_vts).cache_key.hash
      else:
        target_set_id = 'no_targets'

      # NB: Since PythonBinaryCreate is the only task that exports python code for use outside the
      # host system, it's the only python task that needs to resolve for non-'current'
      # platforms. PythonBinaryCreate will actually validate the platforms itself when resolving
      # requirements, instead of using this method, so we can always resolve for 'current' here in
      # order to pull in any binary or universal dists needed for the currently executing host.
      platforms = ['current']

      # The resolve is keyed by interpreter identity plus the target-set cache key.
      path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id))
      # Note that we check for the existence of the directory, instead of for invalid_vts,
      # to cover the empty case.
      if not os.path.isdir(path):
        with safe_concurrent_creation(path) as safe_path:
          pex_builder = PexBuilderWrapper.Factory.create(
            builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True),
            log=self.context.log)
          pex_builder.add_requirement_libs_from(req_libs, platforms=platforms)
          pex_builder.freeze()
    # `path` is bound inside the `with` above; it remains valid here.
    return PEX(path, interpreter=interpreter)
Exemplo n.º 52
0
  def _compile_target(self, vt):
    """'Compiles' a python target.

    'Compiling' means forming an isolated chroot of its sources and transitive deps and then
    attempting to import each of the target's sources in the case of a python library or else the
    entry point in the case of a python binary.

    For a library with sources lib/core.py and lib/util.py a "compiler" main file would look like:

      if __name__ == '__main__':
        import lib.core
        import lib.util

    For a binary with entry point lib.bin:main the "compiler" main file would look like:

      if __name__ == '__main__':
        from lib.bin import main

    In either case the main file is executed within the target chroot to reveal missing BUILD
    dependencies.

    :param vt: The versioned target to compile.
    :returns: The exit code of the eval run (0 on success).
    """
    target = vt.target
    with self.context.new_workunit(name=target.address.spec):
      modules = self._get_modules(target)
      if not modules:
        # Nothing to eval, so a trivial compile success.
        return 0

      interpreter = self._get_interpreter_for_target_closure(target)
      reqs_pex = self._resolve_requirements_for_versioned_target_closure(interpreter, vt)
      srcs_pex = self._source_pex_for_versioned_target_closure(interpreter, vt)

      # Create the executable pex.
      exec_pex_parent = os.path.join(self.workdir, 'executable_pex')
      executable_file_content = self._get_executable_file_content(exec_pex_parent, modules)

      # The exec pex is cached keyed by a hash of its inputs: the two dependency pex
      # paths plus the generated entry-point source.
      hasher = hashlib.sha1()
      hasher.update(reqs_pex.path().encode('utf-8'))
      hasher.update(srcs_pex.path().encode('utf-8'))
      hasher.update(executable_file_content.encode('utf-8'))
      exec_file_hash = hasher.hexdigest()
      exec_pex_path = os.path.realpath(os.path.join(exec_pex_parent, exec_file_hash))
      if not os.path.isdir(exec_pex_path):
        with safe_concurrent_creation(exec_pex_path) as safe_path:
          # Write the entry point.
          safe_mkdir(safe_path)
          with open(os.path.join(safe_path, '{}.py'.format(self._EXEC_NAME)), 'w') as outfile:
            outfile.write(executable_file_content)
          pex_info = (target.pexinfo if isinstance(target, PythonBinary) else None) or PexInfo()
          # Override any user-specified entry point, under the assumption that the
          # executable_file_content does what the user intends (including, probably, calling that
          # underlying entry point).
          pex_info.entry_point = self._EXEC_NAME
          pex_info.pex_path = ':'.join(pex.path() for pex in (reqs_pex, srcs_pex) if pex)
          builder = PEXBuilder(safe_path, interpreter, pex_info=pex_info)
          builder.freeze()

      pex = PEX(exec_pex_path, interpreter)

      # Run the generated entry point; a non-zero exit reveals missing BUILD deps.
      with self.context.new_workunit(name='eval',
                                     labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.RUN,
                                             WorkUnitLabel.TOOL],
                                     cmd=' '.join(pex.cmdline())) as workunit:
        returncode = pex.run(stdout=workunit.output('stdout'), stderr=workunit.output('stderr'))
        workunit.set_outcome(WorkUnit.SUCCESS if returncode == 0 else WorkUnit.FAILURE)
        if returncode != 0:
          self.context.log.error('Failed to eval {}'.format(target.address.spec))
        return returncode
Exemplo n.º 53
0
 def _source_pex_for_versioned_target_closure(self, interpreter, vt):
   """Return a PEX of the sources for `vt`'s target closure, building it if absent.

   The PEX directory is named by the versioned target's cache key hash, so an
   existing directory is reused as-is.
   """
   chroot_path = os.path.realpath(os.path.join(self.workdir, vt.cache_key.hash))
   if not os.path.isdir(chroot_path):
     with safe_concurrent_creation(chroot_path) as chroot:
       self._build_source_pex(interpreter, chroot, vt.target.closure())
   return PEX(chroot_path, interpreter=interpreter)
Exemplo n.º 54
0
 def _setup_interpreter(self, interpreter, cache_target_path):
   """Symlink the interpreter binary into a fresh cache dir and resolve against it."""
   with safe_concurrent_creation(cache_target_path) as scratch:
     # The parent directory will already have been created by safe_concurrent_creation.
     os.mkdir(scratch)
     os.symlink(interpreter.binary, os.path.join(scratch, 'python'))
     return self._resolve(interpreter, scratch)