Code Example #1
File: python_chroot.py Project: jtrobec/pants
    def __init__(self,
                 python_setup,
                 python_repos,
                 ivy_bootstrapper,
                 thrift_binary_factory,
                 interpreter,
                 builder,
                 targets,
                 platforms,
                 extra_requirements=None,
                 log=None):
        self._python_setup = python_setup
        self._python_repos = python_repos
        self._ivy_bootstrapper = ivy_bootstrapper
        self._thrift_binary_factory = thrift_binary_factory

        self._interpreter = interpreter
        self._builder = builder
        self._targets = targets
        self._platforms = platforms
        self._extra_requirements = list(extra_requirements) if extra_requirements else []
        self._logger = log or logger

        # Note: unrelated to the general pants artifact cache.
        self._artifact_cache_root = os.path.join(
            self._python_setup.artifact_cache_dir,
            str(self._interpreter.identity))
        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._artifact_cache_root)
Code Example #2
    def __init__(self, context, workdir):
        """Subclass __init__ methods, if defined, *must* follow this idiom:

    class MyTask(Task):
      def __init__(self, *args, **kwargs):
        super(MyTask, self).__init__(*args, **kwargs)
        ...

    This allows us to change Task.__init__()'s arguments without
    changing every subclass. If the subclass does not need its own
    initialization, this method can (and should) be omitted entirely.
    """
        super(TaskBase, self).__init__()
        self.context = context
        self._workdir = workdir
        # TODO: It would be nice to use self.get_options().cache_key_gen_version here, because then
        # we could have a separate value for each scope if we really wanted to. However we can't
        # access per-task options in Task.__init__ because GroupTask.__init__ calls it with the
        # group task's scope, which isn't currently in the known scopes we generate options for.
        self._cache_key_generator = CacheKeyGenerator(
            self.context.options.for_global_scope().cache_key_gen_version)

        self._cache_key_errors = set()

        self._build_invalidator_dir = os.path.join(
            self.context.options.for_global_scope().pants_workdir,
            'build_invalidator', self.stable_name())

        self._cache_factory = CacheSetup.create_cache_factory_for_task(self)

        self._options_fingerprinter = OptionsFingerprinter(
            self.context.build_graph)
        self._fingerprint = None
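
The docstring above prescribes a pass-through __init__ for subclasses. A minimal standalone sketch of that idiom (the Task base below is a stand-in, not the real pants class):

class Task(object):
    def __init__(self, context, workdir):
        self.context = context
        self._workdir = workdir

class MyTask(Task):
    def __init__(self, *args, **kwargs):
        # Forward everything, so Task.__init__() can change its signature
        # without every subclass needing an update.
        super(MyTask, self).__init__(*args, **kwargs)
        self._my_state = []
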
Code Example #3
File: task.py Project: rkstap/pants
  def _do_invalidation_check(self,
                             fingerprint_strategy,
                             invalidate_dependents,
                             targets,
                             topological_order):

    if self._cache_factory.ignore:
      cache_key_generator = UncacheableCacheKeyGenerator()
    else:
      cache_key_generator = CacheKeyGenerator(
        self.context.options.for_global_scope().cache_key_gen_version,
        self.fingerprint)

    cache_manager = InvalidationCacheManager(self.workdir,
                                             cache_key_generator,
                                             self._build_invalidator(),
                                             invalidate_dependents,
                                             fingerprint_strategy=fingerprint_strategy,
                                             invalidation_report=self.context.invalidation_report,
                                             task_name=self._task_name,
                                             task_version=self.implementation_version_str(),
                                             artifact_write_callback=self.maybe_write_artifact)

    # If this Task's execution has been forced, invalidate all our target fingerprints.
    if self._cache_factory.ignore and not self._force_invalidated:
      self.invalidate()
      self._force_invalidated = True

    return cache_manager.check(targets, topological_order=topological_order)
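
The branch above swaps in UncacheableCacheKeyGenerator when the cache is ignored. A standalone sketch of that null-object switch (class bodies here are illustrative, not the pants implementations): a generator that never repeats a key makes every target look changed, so nothing is ever considered valid.

import hashlib
import uuid

class CacheKeyGenerator(object):
    def key_for(self, payload):
        # Stable: the same payload always hashes to the same key.
        return hashlib.sha1(payload.encode('utf-8')).hexdigest()

class UncacheableCacheKeyGenerator(CacheKeyGenerator):
    def key_for(self, payload):
        # Never stable: a fresh key every call, so no prior key can match.
        return uuid.uuid4().hex
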
Code Example #4
File: cache_manager.py Project: thomasuster/pants
    def __init__(self, cache_manager, versioned_targets):
        self._cache_manager = cache_manager
        self.versioned_targets = versioned_targets
        self.targets = [vt.target for vt in versioned_targets]

        # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
        self.cache_key = CacheKeyGenerator.combine_cache_keys(
            [vt.cache_key for vt in versioned_targets])
        # NB: previous_cache_key may be None on the first build of a target.
        self.previous_cache_key = cache_manager.previous_key(self.cache_key)
        self.valid = self.previous_cache_key == self.cache_key

        if cache_manager.invalidation_report:
            cache_manager.invalidation_report.add_vts(cache_manager,
                                                      self.targets,
                                                      self.cache_key,
                                                      self.valid,
                                                      phase='init')

        self._results_dir = None
        self._current_results_dir = None
        self._previous_results_dir = None
        # True if the results_dir for this VT was created incrementally via clone of the
        # previous results_dir.
        self.is_incremental = False
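
The validity test above reduces to comparing a combined key against the key recorded on the last successful run. A standalone toy version of that idea (not the pants implementation):

import hashlib

def combine_cache_keys(keys):
    # Fold the member targets' hash strings into one stable combined key.
    digest = hashlib.sha1()
    for key in keys:
        digest.update(key.encode('utf-8'))
    return digest.hexdigest()

previous_keys = {}  # stand-in for the invalidator's persisted state

def is_valid(vts_id, member_keys):
    # Valid only if the combined key matches what was recorded last run.
    return previous_keys.get(vts_id) == combine_cache_keys(member_keys)
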
Code Example #5
File: python_chroot.py Project: grimreaper/pants
  def __init__(self,
               python_setup,
               python_repos,
               ivy_bootstrapper,
               thrift_binary_factory,
               interpreter,
               builder,
               targets,
               platforms,
               extra_requirements=None,
               log=None):
    self._python_setup = python_setup
    self._python_repos = python_repos
    self._ivy_bootstrapper = ivy_bootstrapper
    self._thrift_binary_factory = thrift_binary_factory

    self._interpreter = interpreter
    self._builder = builder
    self._targets = targets
    self._platforms = platforms
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._logger = log or logger

    # Note: unrelated to the general pants artifact cache.
    self._artifact_cache_root = os.path.join(
      self._python_setup.artifact_cache_dir, str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._artifact_cache_root)
Code Example #6
File: cache_manager.py Project: ahamilton55/pants
    def __init__(self, cache_manager, versioned_targets):
        """
    :API: public
    """
        self._cache_manager = cache_manager
        self.versioned_targets = versioned_targets
        self.targets = [vt.target for vt in versioned_targets]

        # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
        self.cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key for vt in versioned_targets])
        # NB: previous_cache_key may be None on the first build of a target.
        self.previous_cache_key = cache_manager.previous_key(self.cache_key)
        self.valid = self.previous_cache_key == self.cache_key

        self.num_chunking_units = self.cache_key.num_chunking_units
        if cache_manager.invalidation_report:
            cache_manager.invalidation_report.add_vts(
                cache_manager, self.targets, self.cache_key, self.valid, phase="init"
            )

        self._results_dir = None
        self._previous_results_dir = None
        # True if the results_dir for this VT was created incrementally via clone of the
        # previous results_dir.
        self.is_incremental = False
Code Example #7
def setUp(self):
  super(InvalidationCacheManagerTest, self).setUp()
  self._dir = tempfile.mkdtemp()
  self.cache_manager = InvalidationCacheManager(
    results_dir_root=os.path.join(self._dir, 'results'),
    cache_key_generator=CacheKeyGenerator(),
    build_invalidator=BuildInvalidator(os.path.join(self._dir, 'build_invalidator')),
    invalidate_dependents=True,
  )
Code Example #8
def setUp(self):
    super().setUp()
    self._dir = tempfile.mkdtemp()
    self.cache_manager = InvalidationCacheManager(
        results_dir_root=os.path.join(self._dir, "results"),
        cache_key_generator=CacheKeyGenerator(),
        build_invalidator=BuildInvalidator(
            os.path.join(self._dir, "build_invalidator")),
        invalidate_dependents=True,
        task_version_slug="deadbeef",
    )
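
Neither setUp above removes the mkdtemp() directory; a cleanup hook like the following (an assumption, not part of either excerpt) would normally accompany it on the same test class:

import shutil

def tearDown(self):
    # Remove the temp dir created in setUp so test runs don't leak files.
    shutil.rmtree(self._dir, ignore_errors=True)
    super().tearDown()
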
Code Example #9
File: cache_manager.py Project: priyakoth/pants
def __init__(self, cache_manager, versioned_targets):
    self._cache_manager = cache_manager
    self.versioned_targets = versioned_targets
    self.targets = [vt.target for vt in versioned_targets]
    # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
    self.cache_key = CacheKeyGenerator.combine_cache_keys([vt.cache_key for vt in versioned_targets])
    self.num_chunking_units = self.cache_key.num_chunking_units
    self.valid = not cache_manager.needs_update(self.cache_key)
    if cache_manager.invalidation_report:
        cache_manager.invalidation_report.add_vts(
            cache_manager, self.targets, self.cache_key, self.valid, phase="init"
        )
Code Example #10
def __init__(self, cache_manager, versioned_targets):
    self._cache_manager = cache_manager
    self.versioned_targets = versioned_targets
    self.targets = [vt.target for vt in versioned_targets]
    # The following line is a no-op if cache_key was set in the VersionedTarget __init__ method.
    self.cache_key = CacheKeyGenerator.combine_cache_keys(
        [vt.cache_key for vt in versioned_targets])
    self.num_chunking_units = self.cache_key.num_chunking_units
    self.valid = not cache_manager.needs_update(self.cache_key)
    if cache_manager.invalidation_report:
        cache_manager.invalidation_report.add_vts(cache_manager,
                                                  self.targets,
                                                  self.cache_key,
                                                  self.valid,
                                                  phase='init')
Code Example #11
File: python_chroot.py Project: grimreaper/pants
class PythonChroot(object):
  _VALID_DEPENDENCIES = {
    Files: 'files',
    PrepCommand: 'prep',
    PythonLibrary: 'libraries',
    PythonRequirementLibrary: 'reqs',
    PythonBinary: 'binaries',
    PythonThriftLibrary: 'thrifts',
    PythonAntlrLibrary: 'antlrs',
    PythonTests: 'tests',
    Resources: 'resources'
  }

  class InvalidDependencyException(Exception):
    def __init__(self, target):
      super(PythonChroot.InvalidDependencyException, self).__init__(
        'Not a valid Python dependency! Found: {}'.format(target))

  @staticmethod
  def get_platforms(platform_list):
    return tuple({Platform.current() if p == 'current' else p for p in platform_list})

  def __init__(self,
               python_setup,
               python_repos,
               ivy_bootstrapper,
               thrift_binary_factory,
               interpreter,
               builder,
               targets,
               platforms,
               extra_requirements=None,
               log=None):
    self._python_setup = python_setup
    self._python_repos = python_repos
    self._ivy_bootstrapper = ivy_bootstrapper
    self._thrift_binary_factory = thrift_binary_factory

    self._interpreter = interpreter
    self._builder = builder
    self._targets = targets
    self._platforms = platforms
    self._extra_requirements = list(extra_requirements) if extra_requirements else []
    self._logger = log or logger

    # Note: unrelated to the general pants artifact cache.
    self._artifact_cache_root = os.path.join(
      self._python_setup.artifact_cache_dir, str(self._interpreter.identity))
    self._key_generator = CacheKeyGenerator()
    self._build_invalidator = BuildInvalidator(self._artifact_cache_root)

  def delete(self):
    """Deletes this chroot from disk if it has been dumped."""
    safe_rmtree(self.path())

  def debug(self, msg):
    self._logger.debug(msg)

  def path(self):
    return os.path.realpath(self._builder.path())

  def pex(self):
    return PEX(self.path(), interpreter=self._interpreter)

  def package_pex(self, filename):
    """Package into a PEX zipfile.

    :param filename: The filename where the PEX should be stored.
    """
    self._builder.build(filename)

  def _dump_library(self, library):
    def copy_to_chroot(base, path, add_function):
      src = os.path.join(get_buildroot(), base, path)
      add_function(src, path)

    self.debug('  Dumping library: {}'.format(library))
    for relpath in library.sources_relative_to_source_root():
      try:
        copy_to_chroot(library.target_base, relpath, self._builder.add_source)
      except OSError:
        logger.error("Failed to copy {path} for library {library}"
                     .format(path=os.path.join(library.target_base, relpath),
                             library=library))
        raise

    for resources_tgt in library.resources:
      for resource_file_from_source_root in resources_tgt.sources_relative_to_source_root():
        try:
          copy_to_chroot(resources_tgt.target_base, resource_file_from_source_root,
                         self._builder.add_resource)
        except OSError:
          logger.error("Failed to copy {path} for resource {resource}"
                       .format(path=os.path.join(resources_tgt.target_base,
                                                 resource_file_from_source_root),
                               resource=resources_tgt.address.spec))
          raise

  def _dump_requirement(self, req):
    self.debug('  Dumping requirement: {}'.format(req))
    self._builder.add_requirement(req)

  def _dump_distribution(self, dist):
    self.debug('  Dumping distribution: .../{}'.format(os.path.basename(dist.location)))
    self._builder.add_distribution(dist)

  def _generate_requirement(self, library, builder_cls):
    library_key = self._key_generator.key_for_target(library)
    builder = builder_cls(target=library,
                          root_dir=get_buildroot(),
                          target_suffix='-' + library_key.hash[:8])

    cache_dir = os.path.join(self._artifact_cache_root, library_key.id)
    if self._build_invalidator.needs_update(library_key):
      sdist = builder.build(interpreter=self._interpreter)
      safe_mkdir(cache_dir)
      shutil.copy(sdist, os.path.join(cache_dir, os.path.basename(sdist)))
      self._build_invalidator.update(library_key)

    return PythonRequirement(builder.requirement_string(), repository=cache_dir, use_2to3=True)

  def _generate_thrift_requirement(self, library):
    thrift_builder = functools.partial(PythonThriftBuilder,
                                       thrift_binary_factory=self._thrift_binary_factory,
                                       workdir=safe_mkdtemp(dir=self.path(), prefix='thrift.'))
    return self._generate_requirement(library, thrift_builder)

  def _generate_antlr_requirement(self, library):
    antlr_builder = functools.partial(PythonAntlrBuilder,
                                      ivy_bootstrapper=self._ivy_bootstrapper,
                                      workdir=safe_mkdtemp(dir=self.path(), prefix='antlr.'))
    return self._generate_requirement(library, antlr_builder)

  def resolve(self, targets):
    children = defaultdict(OrderedSet)

    def add_dep(trg):
      for target_type, target_key in self._VALID_DEPENDENCIES.items():
        if isinstance(trg, target_type):
          children[target_key].add(trg)
          return
        elif type(trg) == Target:
          return
      raise self.InvalidDependencyException(trg)
    for target in targets:
      target.walk(add_dep)
    return children

  def dump(self):
    self.debug('Building chroot for {}:'.format(self._targets))
    targets = self.resolve(self._targets)

    for lib in targets['libraries'] | targets['binaries']:
      self._dump_library(lib)

    generated_reqs = OrderedSet()
    if targets['thrifts']:
      for thr in targets['thrifts']:
        generated_reqs.add(self._generate_thrift_requirement(thr))
      generated_reqs.add(PythonRequirement('thrift', use_2to3=True))

    for antlr in targets['antlrs']:
      generated_reqs.add(self._generate_antlr_requirement(antlr))

    reqs_from_libraries = OrderedSet()
    for req_lib in targets['reqs']:
      for req in req_lib.payload.requirements:
        reqs_from_libraries.add(req)

    reqs_to_build = OrderedSet()
    find_links = OrderedSet()

    for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
      if not req.should_build(self._interpreter.python, Platform.current()):
        self.debug('Skipping {} based upon version filter'.format(req))
        continue
      reqs_to_build.add(req)
      self._dump_requirement(req.requirement)
      if req.repository:
        find_links.add(req.repository)

    distributions = self._resolve_multi(reqs_to_build, find_links)

    locations = set()
    for platform, dist_set in distributions.items():
      for dist in dist_set:
        if dist.location not in locations:
          self._dump_distribution(dist)
        locations.add(dist.location)

    if len(targets['binaries']) > 1:
      print('WARNING: Target has multiple python_binary targets!', file=sys.stderr)

    return self._builder

  def _resolve_multi(self, requirements, find_links):
    """Multi-platform dependency resolution for PEX files.

       Given a pants configuration and a set of requirements, return a map of platform name -> list
       of :class:`pkg_resources.Distribution` instances needed to satisfy them on that platform.
       That may involve distributions for multiple platforms.

       :param requirements: A list of :class:`PythonRequirement` objects to resolve.
       :param find_links: Additional paths to search for source packages during resolution.
    """
    distributions = dict()
    platforms = self.get_platforms(self._platforms or self._python_setup.platforms)
    fetchers = self._python_repos.get_fetchers()
    fetchers.extend(Fetcher([path]) for path in find_links)
    context = self._python_repos.get_network_context()

    for platform in platforms:
      requirements_cache_dir = os.path.join(self._python_setup.resolver_cache_dir,
                                            str(self._interpreter.identity))
      distributions[platform] = resolve(
        requirements=[req.requirement for req in requirements],
        interpreter=self._interpreter,
        fetchers=fetchers,
        platform=platform,
        context=context,
        cache=requirements_cache_dir,
        cache_ttl=self._python_setup.resolver_cache_ttl,
        allow_prereleases=self._python_setup.resolver_allow_prereleases)

    return distributions
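
get_platforms() above expands the 'current' sentinel and dedupes via the set comprehension. A runnable standalone equivalent, with sysconfig.get_platform() standing in for pex's Platform.current() (an assumption, not the pants code):

import sysconfig

def get_platforms(platform_list):
    # 'current' becomes the running platform string; the set drops duplicates.
    return tuple({sysconfig.get_platform() if p == 'current' else p
                  for p in platform_list})

print(get_platforms(['current', 'linux-x86_64', 'current']))
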
Code Example #12
File: python_chroot.py Project: jtrobec/pants
class PythonChroot(object):
    _VALID_DEPENDENCIES = {
        PrepCommand: 'prep',
        PythonLibrary: 'libraries',
        PythonRequirementLibrary: 'reqs',
        PythonBinary: 'binaries',
        PythonThriftLibrary: 'thrifts',
        PythonAntlrLibrary: 'antlrs',
        PythonTests: 'tests',
        Resources: 'resources'
    }

    class InvalidDependencyException(Exception):
        def __init__(self, target):
            super(PythonChroot.InvalidDependencyException, self).__init__(
                'Not a valid Python dependency! Found: {}'.format(target))

    @staticmethod
    def get_platforms(platform_list):
        return tuple({
            Platform.current() if p == 'current' else p
            for p in platform_list
        })

    def __init__(self,
                 python_setup,
                 python_repos,
                 ivy_bootstrapper,
                 thrift_binary_factory,
                 interpreter,
                 builder,
                 targets,
                 platforms,
                 extra_requirements=None,
                 log=None):
        self._python_setup = python_setup
        self._python_repos = python_repos
        self._ivy_bootstrapper = ivy_bootstrapper
        self._thrift_binary_factory = thrift_binary_factory

        self._interpreter = interpreter
        self._builder = builder
        self._targets = targets
        self._platforms = platforms
        self._extra_requirements = list(extra_requirements) if extra_requirements else []
        self._logger = log or logger

        # Note: unrelated to the general pants artifact cache.
        self._artifact_cache_root = os.path.join(
            self._python_setup.artifact_cache_dir,
            str(self._interpreter.identity))
        self._key_generator = CacheKeyGenerator()
        self._build_invalidator = BuildInvalidator(self._artifact_cache_root)

    def delete(self):
        """Deletes this chroot from disk if it has been dumped."""
        safe_rmtree(self.path())

    def debug(self, msg):
        self._logger.debug(msg)

    def path(self):
        return os.path.realpath(self._builder.path())

    def pex(self):
        return PEX(self.path(), interpreter=self._interpreter)

    def package_pex(self, filename):
        """Package into a PEX zipfile.

    :param filename: The filename where the PEX should be stored.
    """
        self._builder.build(filename)

    def _dump_library(self, library):
        def copy_to_chroot(base, path, add_function):
            src = os.path.join(get_buildroot(), base, path)
            add_function(src, path)

        self.debug('  Dumping library: {}'.format(library))
        for relpath in library.sources_relative_to_source_root():
            try:
                copy_to_chroot(library.target_base, relpath,
                               self._builder.add_source)
            except OSError:
                logger.error(
                    "Failed to copy {path} for library {library}".format(
                        path=os.path.join(library.target_base, relpath),
                        library=library))
                raise

        for resources_tgt in library.resources:
            for resource_file_from_source_root in resources_tgt.sources_relative_to_source_root():
                try:
                    copy_to_chroot(resources_tgt.target_base,
                                   resource_file_from_source_root,
                                   self._builder.add_resource)
                except OSError:
                    logger.error(
                        "Failed to copy {path} for resource {resource}".format(
                            path=os.path.join(resources_tgt.target_base,
                                              resource_file_from_source_root),
                            resource=resources_tgt.address.spec))
                    raise

    def _dump_requirement(self, req):
        self.debug('  Dumping requirement: {}'.format(req))
        self._builder.add_requirement(req)

    def _dump_distribution(self, dist):
        self.debug('  Dumping distribution: .../{}'.format(
            os.path.basename(dist.location)))
        self._builder.add_distribution(dist)

    def _generate_requirement(self, library, builder_cls):
        library_key = self._key_generator.key_for_target(library)
        builder = builder_cls(target=library,
                              root_dir=get_buildroot(),
                              target_suffix='-' + library_key.hash[:8])

        cache_dir = os.path.join(self._artifact_cache_root, library_key.id)
        if self._build_invalidator.needs_update(library_key):
            sdist = builder.build(interpreter=self._interpreter)
            safe_mkdir(cache_dir)
            shutil.copy(sdist, os.path.join(cache_dir,
                                            os.path.basename(sdist)))
            self._build_invalidator.update(library_key)

        return PythonRequirement(builder.requirement_string(),
                                 repository=cache_dir,
                                 use_2to3=True)

    def _generate_thrift_requirement(self, library):
        thrift_builder = functools.partial(
            PythonThriftBuilder,
            thrift_binary_factory=self._thrift_binary_factory,
            workdir=safe_mkdtemp(dir=self.path(), prefix='thrift.'))
        return self._generate_requirement(library, thrift_builder)

    def _generate_antlr_requirement(self, library):
        antlr_builder = functools.partial(
            PythonAntlrBuilder,
            ivy_bootstrapper=self._ivy_bootstrapper,
            workdir=safe_mkdtemp(dir=self.path(), prefix='antlr.'))
        return self._generate_requirement(library, antlr_builder)

    def resolve(self, targets):
        children = defaultdict(OrderedSet)

        def add_dep(trg):
            # Currently we handle all of our code generation, so we don't want to operate over any
            # synthetic targets injected upstream.
            # TODO(John Sirois): Revisit this when building a proper python product pipeline.
            if trg.is_synthetic:
                return

            for target_type, target_key in self._VALID_DEPENDENCIES.items():
                if isinstance(trg, target_type):
                    children[target_key].add(trg)
                    return
                elif type(trg) == Target:
                    return
            raise self.InvalidDependencyException(trg)

        for target in targets:
            target.walk(add_dep)
        return children

    def dump(self):
        self.debug('Building chroot for {}:'.format(self._targets))
        targets = self.resolve(self._targets)

        for lib in targets['libraries'] | targets['binaries']:
            self._dump_library(lib)

        generated_reqs = OrderedSet()
        if targets['thrifts']:
            for thr in targets['thrifts']:
                generated_reqs.add(self._generate_thrift_requirement(thr))
            generated_reqs.add(PythonRequirement('thrift', use_2to3=True))

        for antlr in targets['antlrs']:
            generated_reqs.add(self._generate_antlr_requirement(antlr))

        reqs_from_libraries = OrderedSet()
        for req_lib in targets['reqs']:
            for req in req_lib.payload.requirements:
                reqs_from_libraries.add(req)

        reqs_to_build = OrderedSet()
        find_links = OrderedSet()

        for req in reqs_from_libraries | generated_reqs | self._extra_requirements:
            if not req.should_build(self._interpreter.python,
                                    Platform.current()):
                self.debug('Skipping {} based upon version filter'.format(req))
                continue
            reqs_to_build.add(req)
            self._dump_requirement(req.requirement)
            if req.repository:
                find_links.add(req.repository)

        distributions = self._resolve_multi(reqs_to_build, find_links)

        locations = set()
        for platform, dist_set in distributions.items():
            for dist in dist_set:
                if dist.location not in locations:
                    self._dump_distribution(dist)
                locations.add(dist.location)

        if len(targets['binaries']) > 1:
            print('WARNING: Target has multiple python_binary targets!',
                  file=sys.stderr)

        return self._builder

    def _resolve_multi(self, requirements, find_links):
        """Multi-platform dependency resolution for PEX files.

       Given a pants configuration and a set of requirements, return a map of platform name ->
       list of :class:`pkg_resources.Distribution` instances needed to satisfy them on that
       platform.  That may involve distributions for multiple platforms.

       :param requirements: A list of :class:`PythonRequirement` objects to resolve.
       :param find_links: Additional paths to search for source packages during resolution.
    """
        distributions = dict()
        platforms = self.get_platforms(self._platforms
                                       or self._python_setup.platforms)
        fetchers = self._python_repos.get_fetchers()
        fetchers.extend(Fetcher([path]) for path in find_links)
        context = self._python_repos.get_network_context()

        for platform in platforms:
            requirements_cache_dir = os.path.join(
                self._python_setup.resolver_cache_dir,
                str(self._interpreter.identity))
            distributions[platform] = resolve(
                requirements=[req.requirement for req in requirements],
                interpreter=self._interpreter,
                fetchers=fetchers,
                platform=platform,
                context=context,
                cache=requirements_cache_dir,
                cache_ttl=self._python_setup.resolver_cache_ttl)

        return distributions
Code Example #13
File: task.py Project: JustinHendryx/pants
    def invalidated(self,
                    targets,
                    invalidate_dependents=False,
                    silent=False,
                    fingerprint_strategy=None,
                    topological_order=False):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If True, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per-task,
                                 finer-grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

        cache_key_generator = CacheKeyGenerator(
            self.context.options.for_global_scope().cache_key_gen_version,
            self.fingerprint)
        cache_manager = InvalidationCacheManager(
            self.workdir,
            cache_key_generator,
            self._build_invalidator_dir,
            invalidate_dependents,
            fingerprint_strategy=fingerprint_strategy,
            invalidation_report=self.context.invalidation_report,
            task_name=type(self).__name__,
            task_version=self.implementation_version_str(),
            artifact_write_callback=self.maybe_write_artifact)

        invalidation_check = cache_manager.check(
            targets, topological_order=topological_order)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts, uncached_causes = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                self.context.run_tracker.artifact_cache_stats.add_hits(
                    cache_manager.task_name, cached_targets)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                self.context.run_tracker.artifact_cache_stats.add_misses(
                    cache_manager.task_name, uncached_targets, uncached_causes)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts)

        if not silent:
            targets = []
            for vt in invalidation_check.invalid_vts:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target'),
                    '.'
                ]
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='pre-check')

        # Cache has been checked to create the full list of invalid VTs.
        # Only copy previous_results for this subset of VTs.
        if self.incremental:
            for vts in invalidation_check.invalid_vts:
                vts.copy_previous_results()

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='post-check')

        for vt in invalidation_check.invalid_vts:
            vt.update()

        # Background work to clean up previous builds.
        if self.context.options.for_global_scope().workdir_max_build_entries is not None:
            self._launch_background_workdir_cleanup(invalidation_check.all_vts)
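
In pants, invalidated() is consumed as a context manager (the method above yields; the decorator is outside this excerpt), so a subclass typically drives it from execute(). A hedged sketch of that call pattern; compile_target is a hypothetical helper, and vt.target / vt.results_dir follow the VersionedTarget attributes seen elsewhere on this page:

def execute(self):
    targets = self.context.targets()
    with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
        for vt in invalidation_check.invalid_vts:
            # Only rebuild what the check reported as invalid.
            self.compile_target(vt.target, vt.results_dir)  # hypothetical helper
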
Code Example #14
@contextmanager  # from contextlib; the function yields, so it is a context manager
def test_env(content=TEST_CONTENT):
    with temporary_dir() as d:
        with tempfile.NamedTemporaryFile() as f:
            f.write(content)
            f.flush()
            yield f, CacheKeyGenerator(), BuildInvalidator(d)
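
Hypothetical usage of the fixture above, unpacking the yielded triple:

with test_env() as (f, key_generator, invalidator):
    # f is the flushed temp file; the generator/invalidator pair is fresh.
    print(f.name, key_generator, invalidator)
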
Code Example #15
    def invalidated(self,
                    targets,
                    invalidate_dependents=False,
                    silent=False,
                    fingerprint_strategy=None,
                    topological_order=False):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If True, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per-task,
                                 finer-grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

        cache_key_generator = CacheKeyGenerator(
            self.context.options.for_global_scope().cache_key_gen_version,
            self.fingerprint)
        cache_manager = InvalidationCacheManager(
            self.workdir,
            cache_key_generator,
            self._build_invalidator(),
            invalidate_dependents,
            fingerprint_strategy=fingerprint_strategy,
            invalidation_report=self.context.invalidation_report,
            task_name=type(self).__name__,
            task_version=self.implementation_version_str(),
            artifact_write_callback=self.maybe_write_artifact)

        # If this Task's execution has been forced, invalidate all our target fingerprints.
        if self._cache_factory.ignore and not self._force_invalidated:
            self.invalidate()
            self._force_invalidated = True

        invalidation_check = cache_manager.check(
            targets, topological_order=topological_order)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts, uncached_causes = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                self.context.run_tracker.artifact_cache_stats.add_hits(
                    cache_manager.task_name, cached_targets)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                self.context.run_tracker.artifact_cache_stats.add_misses(
                    cache_manager.task_name, uncached_targets, uncached_causes)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts)

        if not silent:
            targets = []
            for vt in invalidation_check.invalid_vts:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target'),
                    '.'
                ]
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='pre-check')

        # Cache has been checked to create the full list of invalid VTs.
        # Only copy previous_results for this subset of VTs.
        if self.incremental:
            for vts in invalidation_check.invalid_vts:
                vts.copy_previous_results()

        # This may seem odd: why would we need to invalidate a VersionedTargetSet that is already
        # invalid?  But the name force_invalidate() is slightly misleading in this context - what it
        # actually does is delete the key file created at the end of the last successful task run.
        # This is necessary to avoid the following scenario:
        #
        # 1) In state A: Task succeeds and writes some output.  Key is recorded by the invalidator.
        # 2) In state B: Task fails, but writes some output.  Key is not recorded.
        # 3) After reverting back to state A: The current key is the same as the one recorded at the
        #    end of step 1), so it looks like no work needs to be done, but actually the task
        #    must re-run, to overwrite the output written in step 2.
        #
        # Deleting the file ensures that if a task fails, there is no key for which we might think
        # we're in a valid state.
        for vts in invalidation_check.invalid_vts:
            vts.force_invalidate()

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='post-check')

        for vt in invalidation_check.invalid_vts:
            vt.update()

        # Background work to clean up previous builds.
        if self.context.options.for_global_scope().workdir_max_build_entries is not None:
            self._launch_background_workdir_cleanup(invalidation_check.all_vts)
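
The long comment above motivates force_invalidate() with a succeed/fail/revert scenario. A toy invalidator (an illustration, not the pants BuildInvalidator) acting that scenario out:

class ToyInvalidator(object):
    def __init__(self):
        self._recorded = {}

    def needs_update(self, name, key):
        return self._recorded.get(name) != key

    def update(self, name, key):
        # Record the key only after a successful run.
        self._recorded[name] = key

    def force_invalidate(self, name):
        # Delete the recorded key before attempting risky work.
        self._recorded.pop(name, None)

inv = ToyInvalidator()
inv.update('t', 'key-A')               # 1) state A: task succeeds, key recorded
inv.force_invalidate('t')              # key removed before the next attempt
# 2) state B: task fails after writing output; update() is never called
assert inv.needs_update('t', 'key-A')  # 3) back in state A: task must re-run
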