Пример #1
0
    def test_hash_file(self):
        """Verify hash_file() feeds the file's bytes to the digest and returns its hexdigest."""
        # mox expectations are recorded in call order, then frozen by ReplayAll().
        self.digest.update('jake jones')
        self.digest.hexdigest().AndReturn('1137')
        self.mox.ReplayAll()

        with temporary_file() as tmp:
            tmp.write('jake jones')
            tmp.close()  # Flush to disk so hash_file() sees the content.

            self.assertEqual('1137', hash_file(tmp.name, digest=self.digest))
Пример #2
0
    def test_hash_file(self):
        """hash_file() should hash the file body via the supplied digest object."""
        # Record the expected digest interactions before switching mox to replay mode.
        self.digest.update("jake jones")
        self.digest.hexdigest().AndReturn("1137")
        self.mox.ReplayAll()

        with temporary_file() as handle:
            handle.write("jake jones")
            handle.close()  # Ensure the payload is flushed before hashing.
            actual = hash_file(handle.name, digest=self.digest)
            self.assertEqual("1137", actual)
Пример #3
0
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False):
    """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.
    silent:                If True, suppress the cache-hit/miss and invalidation log messages.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    # Task-level invalidation inputs that apply uniformly to all targets.
    extra_data = [self.invalidate_for()]
    extra_data.extend(hash_file(f) for f in self.invalidate_for_files())

    cache_manager = CacheManager(self._cache_key_generator,
                                 self._build_invalidator_dir,
                                 invalidate_dependents,
                                 extra_data,
                                 only_externaldeps=only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

    if not silent:
      targets = []
      sources = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        sources.extend(vt.cache_key.sources)
      if targets:  # Truthiness check instead of len() per PEP 8.
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if sources:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(sources, 'source file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
Пример #4
0
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependents=False,
                    partition_size_hint=sys.maxint,
                    silent=False):
        """Checks targets for invalidation, first checking the artifact cache.

        Subclasses call this to figure out what to work on.

        targets:               The targets to check for changes.
        only_buildfiles:       If True, then only the target's BUILD files are checked for
                               changes, not its sources.
        invalidate_dependents: If True then any targets depending on changed targets are
                               invalidated.
        partition_size_hint:   Each VersionedTargetSet in the yielded list will represent
                               targets containing roughly this number of source files, if
                               possible. Set to sys.maxint for a single VersionedTargetSet.
                               Set to 0 for one VersionedTargetSet per target. It is up to the
                               caller to do the right thing with whatever partitioning it asks
                               for.
        silent:                If True, suppress the cache-hit/miss and invalidation log
                               messages.

        Yields an InvalidationCheck object reflecting the (partitioned) targets.

        If no exceptions are thrown by work in the block, the build cache is updated for the
        targets. Note: the artifact cache is not updated. That must be done manually.
        """
        # Task-level invalidation inputs that apply uniformly to all targets.
        extra_data = [self.invalidate_for()]
        extra_data.extend(hash_file(f) for f in self.invalidate_for_files())

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir,
                                     invalidate_dependents,
                                     extra_data,
                                     only_externaldeps=only_buildfiles)

        invalidation_check = cache_manager.check(targets, partition_size_hint)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                for t in cached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_hit(
                        'default', t)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                for t in uncached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_miss(
                        'default', t)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

        if not silent:
            targets = []
            sources = []
            num_invalid_partitions = len(
                invalidation_check.invalid_vts_partitioned)
            for vt in invalidation_check.invalid_vts_partitioned:
                targets.extend(vt.targets)
                sources.extend(vt.cache_key.sources)
            if targets:  # Truthiness check instead of len() per PEP 8.
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target')
                ]
                if sources:
                    msg_elements.append(' containing ')
                    msg_elements.append(
                        items_to_report_element(sources, 'source file'))
                if num_invalid_partitions > 1:
                    msg_elements.append(' in %d target partitions' %
                                        num_invalid_partitions)
                msg_elements.append('.')
                self.context.log.info(*msg_elements)

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check
        if not self.dry_run:
            for vt in invalidation_check.invalid_vts:
                vt.update()  # In case the caller doesn't update.
Пример #5
0
 def log_zinc_file(self, analysis_file):
   """Log at debug level the zinc analysis file path plus a fingerprint of its contents."""
   if os.path.exists(analysis_file):
     fingerprint = hash_file(analysis_file).upper()
   else:
     fingerprint = 'nonexistent'
   self.context.log.debug('Calling zinc on: %s (%s)' % (analysis_file, fingerprint))
Пример #6
0
    def invalidated_with_artifact_cache_check(
        self, targets, only_buildfiles=False, invalidate_dependents=False, partition_size_hint=sys.maxint
    ):
        """Checks targets for invalidation, first checking the artifact cache.

        Subclasses call this to figure out what to work on.

        targets:               The targets to check for changes.
        only_buildfiles:       If True, then only the target's BUILD files are checked for
                               changes, not its sources.
        invalidate_dependents: If True then any targets depending on changed targets are
                               invalidated.
        partition_size_hint:   Each VersionedTargetSet in the yielded list will represent
                               targets containing roughly this number of source files, if
                               possible. Set to sys.maxint for a single VersionedTargetSet.
                               Set to 0 for one VersionedTargetSet per target. It is up to the
                               caller to do the right thing with whatever partitioning it asks
                               for.

        Yields an InvalidationCheck object reflecting the (partitioned) targets after the
        artifact-cache lookups. (Cached VersionedTargets are consumed internally; they are not
        yielded to the caller.)

        If no exceptions are thrown by work in the block, the build cache is updated for the
        targets. Note: the artifact cache is not updated, that must be done manually.
        """
        # Task-level invalidation inputs that apply uniformly to all targets.
        extra_data = [self.invalidate_for()]
        extra_data.extend(hash_file(f) for f in self.invalidate_for_files())

        cache_manager = CacheManager(
            self._cache_key_generator,
            self._build_invalidator_dir,
            invalidate_dependents,
            extra_data,
            only_externaldeps=only_buildfiles,
        )

        initial_invalidation_check = cache_manager.check(targets, partition_size_hint)

        # See if we have entire partitions cached.
        partitions_to_check = [vt for vt in initial_invalidation_check.all_vts_partitioned if not vt.valid]
        cached_partitions, uncached_partitions = self.check_artifact_cache(partitions_to_check)

        # See if we have any individual targets from the uncached partitions.
        uncached_vts = [x.versioned_targets for x in uncached_partitions]
        vts_to_check = [vt for vt in itertools.chain.from_iterable(uncached_vts) if not vt.valid]
        cached_targets, uncached_targets = self.check_artifact_cache(vts_to_check)

        # Now that we've checked the cache, re-partition whatever is still invalid.
        invalidation_check = InvalidationCheck(
            initial_invalidation_check.all_vts, uncached_targets, partition_size_hint
        )

        # Tally what remains invalid after the cache checks.
        num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
        num_invalid_targets = 0
        num_invalid_sources = 0
        for vt in invalidation_check.invalid_vts:
            if not vt.valid:
                num_invalid_targets += len(vt.targets)
                num_invalid_sources += vt.cache_key.num_sources

        # Do some reporting.
        if num_invalid_partitions > 0:
            self.context.log.info(
                "Operating on %d files in %d invalidated targets in %d target"
                " partitions" % (num_invalid_sources, num_invalid_targets, num_invalid_partitions)
            )

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check
        if not self.dry_run:
            for vt in invalidation_check.invalid_vts:
                vt.update()  # In case the caller doesn't update.