Example #1
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependants=False,
                    partition_size_hint=sys.maxint):
        """Checks targets for invalidation. Subclasses call this to figure out what to work on.

    targets: The targets to check for changes.

    only_buildfiles: If True, then only the target's BUILD files are checked for changes, not its sources.

    invalidate_dependants: If True then any targets depending on changed targets are invalidated.

    partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets containing roughly
    this number of source files, if possible. Set to sys.maxint for a single VersionedTargetSet. Set to 0 for
    one VersionedTargetSet per target. It is up to the caller to do the right thing with whatever partitioning
    it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets. If no exceptions are
    thrown by work in the block, the cache is updated for the targets.
    """
        extra_data = []
        extra_data.append(self.invalidate_for())

        for f in self.invalidate_for_files():
            sha = hashlib.sha1()
            with open(f, "rb") as fd:
                sha.update(fd.read())
            extra_data.append(sha.hexdigest())

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir,
                                     invalidate_dependants, extra_data,
                                     only_buildfiles)

        invalidation_check = cache_manager.check(targets, partition_size_hint)

        num_invalid_partitions = len(
            invalidation_check.invalid_vts_partitioned)
        num_invalid_targets = 0
        num_invalid_sources = 0
        for vt in invalidation_check.invalid_vts:
            if not vt.valid:
                num_invalid_targets += len(vt.targets)
                num_invalid_sources += vt.cache_key.num_sources

        # Do some reporting.
        if num_invalid_partitions > 0:
            self.context.log.info('Operating on %d files in %d invalidated targets in %d target partitions' % \
                                  (num_invalid_sources, num_invalid_targets, num_invalid_partitions))

        # Yield the result, and then update the cache.
        yield invalidation_check
        if not self.dry_run:
            for vt in invalidation_check.invalid_vts:
                vt.update()  # In case the caller doesn't update.
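Each of these variants yields exactly once, so callers drive invalidated() as a context manager (in the surrounding Task class it is presumably wrapped with contextlib.contextmanager, which is not shown in this excerpt). A minimal sketch of how a subclass might consume the version in Example #1; compile_sources() and the partition_size_hint value are hypothetical:

  # Hypothetical subclass usage of Example #1's invalidated(); assumes the
  # generator is exposed as a context manager.
  def execute(self, targets):
    with self.invalidated(targets,
                          invalidate_dependants=True,
                          partition_size_hint=200) as invalidation_check:
      for vts in invalidation_check.invalid_vts_partitioned:
        # Work only on what changed; the cache keys are marked up to date
        # after the with block exits without raising.
        self.compile_sources(vts.targets)  # hypothetical work method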
Example #2
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependants=False):
        """
      Checks targets for invalidation.

      Yields the result to a with block. If no exceptions are thrown by work in the block, the
      cache is updated for the targets.

      :targets The targets to check for changes.
      :only_buildfiles If True, then just the target's BUILD files are checked for changes.
      :invalidate_dependants If True then any targets depending on changed targets are invalidated.
      :returns: an InvalidationResult reflecting the invalidated targets.
    """
        # invalidate_for() contributes extra data that is factored into the invalidation check.
        extra_data = []
        extra_data.append(self.invalidate_for())

        for f in self.invalidate_for_files():
            sha = hashlib.sha1()
            with open(f, "rb") as fd:
                sha.update(fd.read())
            extra_data.append(sha.hexdigest())

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir, targets,
                                     invalidate_dependants, extra_data,
                                     only_buildfiles)

        # Check for directly changed targets.
        all_versioned_targets = cache_manager.check(targets)
        invalidation_result = InvalidationResult(cache_manager,
                                                 all_versioned_targets)
        num_invalid_targets = len(invalidation_result.invalid_targets())

        # Do some reporting.
        if num_invalid_targets > 0:
            num_files = reduce(lambda x, y: x + y, [
                vt.cache_key.num_sources
                for vt in all_versioned_targets if not vt.valid
            ], 0)
            self.context.log.info(
                'Operating on %d files in %d invalidated targets' %
                (num_files, num_invalid_targets))

        # Yield the result, and then update the cache.
        if num_invalid_targets > 0:
            self.context.log.debug('Invalidated targets %s' %
                                   invalidation_result.invalid_targets())
        yield invalidation_result
        for vt in invalidation_result.invalid_versioned_targets():
            cache_manager.update(vt.cache_key)
Example #3
  def invalidated(self,
                  targets,
                  only_buildfiles = False,
                  invalidate_dependents = False,
                  partition_size_hint = sys.maxint):
    """Checks targets for invalidation. Subclasses call this to figure out what to work on.

    targets: The targets to check for changes.

    only_buildfiles: If True, then only the target's BUILD files are checked for changes, not its sources.

    invalidate_dependents: If True then any targets depending on changed targets are invalidated.

    partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets containing roughly
    this number of source files, if possible. Set to sys.maxint for a single VersionedTargetSet. Set to 0 for
    one VersionedTargetSet per target. It is up to the caller to do the right thing with whatever partitioning
    it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets. If no exceptions are
    thrown by work in the block, the cache is updated for the targets.
    """
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in self.invalidate_for_files():
      sha = hashlib.sha1()
      with open(f, "rb") as fd:
        sha.update(fd.read())
      extra_data.append(sha.hexdigest())

    cache_manager = CacheManager(self._cache_key_generator, self._build_invalidator_dir,
      invalidate_dependents, extra_data, only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
    num_invalid_targets = 0
    num_invalid_sources = 0
    for vt in invalidation_check.invalid_vts:
      if not vt.valid:
        num_invalid_targets += len(vt.targets)
        num_invalid_sources += vt.cache_key.num_sources

    # Do some reporting.
    if num_invalid_partitions > 0:
      self.context.log.info('Operating on %d files in %d invalidated targets in %d target partitions' % \
                            (num_invalid_sources, num_invalid_targets, num_invalid_partitions))

    # Yield the result, and then update the cache.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
Example #4
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependants=False):
    """
      Checks targets for invalidation.

      Yields the result to a with block. If no exceptions are thrown by work in the block, the
      cache is updated for the targets.

      :targets The targets to check for changes.
      :only_buildfiles If True, then just the target's BUILD files are checked for changes.
      :invalidate_dependants If True then any targets depending on changed targets are invalidated.
      :returns: an InvalidationResult reflecting the invalidated targets.
    """
    # invalidate_for() contributes extra data that is factored into the invalidation check.
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in self.invalidate_for_files():
      sha = hashlib.sha1()
      with open(f, "rb") as fd:
        sha.update(fd.read())
      extra_data.append(sha.hexdigest())

    cache_manager = CacheManager(self._cache_key_generator, self._build_invalidator_dir,
      targets, invalidate_dependants, extra_data, only_buildfiles)

    # Check for directly changed targets.
    all_versioned_targets = cache_manager.check(targets)
    invalidation_result = InvalidationResult(cache_manager, all_versioned_targets)
    num_invalid_targets = len(invalidation_result.invalid_targets())

    # Do some reporting.
    if num_invalid_targets > 0:
      num_files = reduce(lambda x, y: x + y,
        [vt.cache_key.num_sources for vt in all_versioned_targets if not vt.valid], 0)
      self.context.log.info('Operating on %d files in %d invalidated targets' % (num_files, num_invalid_targets))

    # Yield the result, and then update the cache.
    if num_invalid_targets > 0:
      self.context.log.debug('Invalidated targets %s' % invalidation_result.invalid_targets())
    yield invalidation_result
    for vt in invalidation_result.invalid_versioned_targets():
      cache_manager.update(vt.cache_key)
Example #5
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = CacheManager(self._cache_key_generator,
                                 self._build_invalidator_dir,
                                 invalidate_dependents,
                                 extra_data,
                                 only_externaldeps=only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

    if not silent:
      targets = []
      sources = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        sources.extend(vt.cache_key.sources)
      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if len(sources) > 0:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(sources, 'source file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
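Examples #5 and #7 call a hash_file(f) helper instead of inlining the digest computation. The helper itself is not defined in this section; judging from the inline version used by the other examples, it presumably boils down to a hex SHA-1 of the file's bytes:

import hashlib

def hash_file(path):
  # Sketch of the helper assumed by Examples #5 and #7: matches the inline
  # SHA-1 logic in the other examples.
  sha = hashlib.sha1()
  with open(path, 'rb') as fd:
    sha.update(fd.read())
  return sha.hexdigest()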
Example #6
  def __init__(self, tmpdir):
    CacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True, None, False)
Example #7
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    with self.context.new_workunit('invalidation'):
      extra_data = []
      extra_data.append(self.invalidate_for())

      for f in self.invalidate_for_files():
        extra_data.append(hash_file(f))

      cache_manager = CacheManager(self._cache_key_generator,
                                   self._build_invalidator_dir,
                                   invalidate_dependents,
                                   extra_data,
                                   only_externaldeps=only_buildfiles)

      invalidation_check = cache_manager.check(targets, partition_size_hint)

    # See if we have entire partitions cached.
    if invalidation_check.invalid_vts and self._artifact_cache and \
        self.context.options.read_from_artifact_cache:
      with self.context.new_workunit('cache'):
        all_cached_targets = []
        partitions_to_check = \
          [vt for vt in invalidation_check.all_vts_partitioned if not vt.valid]
        cached_partitions, uncached_partitions = self.check_artifact_cache(partitions_to_check)
        for vt in cached_partitions:
          for t in vt.targets:
            all_cached_targets.append(t)

        # See if we have any individual targets from the uncached partitions.
        vts_to_check = [vt for vt in itertools.chain.from_iterable(
          [x.versioned_targets for x in uncached_partitions]) if not vt.valid]
        cached_targets, uncached_targets = self.check_artifact_cache(vts_to_check)
        for vt in cached_targets:
          all_cached_targets.append(vt.target)

      if all_cached_targets:
        # Do some reporting.
        for t in all_cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        self._report_targets('Using cached artifacts for ', all_cached_targets, '.')

      # Now that we've checked the cache, re-partition whatever is still invalid.
      if uncached_targets:
        for vts in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', vts.target)
        self._report_targets('No cached artifacts for ',
                             [vt.target for vt in uncached_targets], '.')
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_targets, partition_size_hint)

    # Do some reporting.
    targets = []
    sources = []
    num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
    for vt in invalidation_check.invalid_vts_partitioned:
      targets.extend(vt.targets)
      sources.extend(vt.cache_key.sources)
    if len(targets):
      msg_elements = ['Invalidated ',
                      items_to_report_element([t.address.reference() for t in targets], 'target')]
      if len(sources) > 0:
        msg_elements.append(' containing ')
        msg_elements.append(items_to_report_element(sources, 'source file'))
      if num_invalid_partitions > 1:
        msg_elements.append(' in %d target partitions' % num_invalid_partitions)
      msg_elements.append('.')
      self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
Example #8
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets: The targets to check for changes.

    only_buildfiles: If True, then only the target's BUILD files are checked for changes,
                     not its sources.

    invalidate_dependents: If True then any targets depending on changed targets are invalidated.

    partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets
                         containing roughly this number of source files, if possible. Set to
                         sys.maxint for a single VersionedTargetSet. Set to 0 for one
                         VersionedTargetSet per target. It is up to the caller to do the right
                         thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated, that must be done manually.
    """
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in sorted(self.invalidate_for_files()):
      sha = hashlib.sha1()
      with open(f, "rb") as fd:
        sha.update(fd.read())
      extra_data.append(sha.hexdigest())

    cache_manager = CacheManager(self._cache_key_generator, self._build_invalidator_dir,
      invalidate_dependents, extra_data, only_externaldeps=only_buildfiles)

    initial_invalidation_check = cache_manager.check(targets, partition_size_hint)

    # See if we have entire partitions cached.
    partitions_to_check = \
      [vt for vt in initial_invalidation_check.all_vts_partitioned if not vt.valid]
    cached_partitions, uncached_partitions = self.check_artifact_cache(partitions_to_check)

    # See if we have any individual targets from the uncached partitions.
    vts_to_check = [vt for vt in itertools.chain.from_iterable(
      [x.versioned_targets for x in uncached_partitions]) if not vt.valid]
    cached_targets, uncached_targets = self.check_artifact_cache(vts_to_check)

    # Now that we've checked the cache, re-partition whatever is still invalid.
    invalidation_check = \
      InvalidationCheck(initial_invalidation_check.all_vts, uncached_targets, partition_size_hint)

    # Do some reporting.
    num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
    num_invalid_targets = 0
    num_invalid_sources = 0
    for vt in invalidation_check.invalid_vts:
      if not vt.valid:
        num_invalid_targets += len(vt.targets)
        num_invalid_sources += vt.cache_key.num_sources
    if num_invalid_partitions > 0:
      self.context.log.info('Operating on %d files in %d invalidated targets in %d ' \
                            'target partitions' % \
                            (num_invalid_sources, num_invalid_targets, num_invalid_partitions))

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
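check_artifact_cache() is called in Examples #5, #7, #8 and #10 but not defined in this section. Based on the per-VersionedTargetSet probe in Example #9 (self._artifact_cache.use_cached_files(vt.cache_key)), a plausible sketch of the (cached, uncached) split it performs is the following; this is an assumption, not the actual implementation:

  def check_artifact_cache(self, vts):
    # Sketch only: partition vts into cache hits and misses by probing the
    # artifact cache with each cache key, as Example #9 does per target set.
    cached, uncached = [], []
    for vt in vts:
      if self._artifact_cache and self._artifact_cache.use_cached_files(vt.cache_key):
        vt.update()  # A hit means these targets are now up to date.
        cached.append(vt)
      else:
        uncached.append(vt)
    return cached, uncached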
Example #9
  def invalidated_with_artifact_cache_check(self,
                                            targets,
                                            only_buildfiles = False,
                                            invalidate_dependents = False,
                                            partition_size_hint = sys.maxint):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets: The targets to check for changes.

    only_buildfiles: If True, then only the target's BUILD files are checked for changes, not its sources.

    invalidate_dependents: If True then any targets depending on changed targets are invalidated.

    partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets containing roughly
    this number of source files, if possible. Set to sys.maxint for a single VersionedTargetSet. Set to 0 for
    one VersionedTargetSet per target. It is up to the caller to do the right thing with whatever partitioning
    it asks for.

    Yields a pair of (invalidation_check, cached_vts) where invalidation_check is an InvalidationCheck object
    reflecting the (partitioned) targets, and cached_vts is a list of VersionedTargets that were satisfied
    from the artifact cache.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated, that must be done manually.
    """
    extra_data = []
    extra_data.append(self.invalidate_for())

    for f in self.invalidate_for_files():
      sha = hashlib.sha1()
      with open(f, "rb") as fd:
        sha.update(fd.read())
      extra_data.append(sha.hexdigest())

    cache_manager = CacheManager(self._cache_key_generator, self._build_invalidator_dir,
      invalidate_dependents, extra_data, only_externaldeps=only_buildfiles)

    unpartitioned_invalidation_check = cache_manager.check(targets)
    cached_vts = []
    if self._artifact_cache and self.context.options.read_from_artifact_cache:
      for vt in unpartitioned_invalidation_check.invalid_vts:
        if self._artifact_cache.use_cached_files(vt.cache_key):
          self.context.log.info('Using cached artifacts for %s' % vt.targets)
          vt.update()
          cached_vts.append(vt)
        else:
          self.context.log.info('No cached artifacts for %s' % vt.targets)

    invalid_vts = list(OrderedSet(unpartitioned_invalidation_check.invalid_vts) - set(cached_vts))
    # Now that we've checked the cache, partition whatever is still invalid.
    partitioned_invalidation_check = \
      InvalidationCheck(unpartitioned_invalidation_check.all_vts, invalid_vts, partition_size_hint)

    num_invalid_partitions = len(partitioned_invalidation_check.invalid_vts_partitioned)
    num_invalid_targets = 0
    num_invalid_sources = 0
    for vt in partitioned_invalidation_check.invalid_vts:
      if not vt.valid:
        num_invalid_targets += len(vt.targets)
        num_invalid_sources += vt.cache_key.num_sources

    # Do some reporting.
    if num_invalid_partitions > 0:
      self.context.log.info('Operating on %d files in %d invalidated targets in %d target partitions' % \
                            (num_invalid_sources, num_invalid_targets, num_invalid_partitions))

    # Yield the result, and then update the cache.
    yield partitioned_invalidation_check, cached_vts
    if not self.dry_run:
      for vt in partitioned_invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
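Unlike the other variants, Example #9 yields a pair, so a caller unpacks both the partitioned check and the list of cache-satisfied VersionedTargets. A minimal sketch; post_process_cached() and compile_sources() are hypothetical:

  # Hypothetical caller of Example #9's variant.
  with self.invalidated_with_artifact_cache_check(targets) as (invalidation_check, cached_vts):
    if cached_vts:
      self.post_process_cached(cached_vts)  # hypothetical handling of cache hits
    for vts in invalidation_check.invalid_vts_partitioned:
      self.compile_sources(vts.targets)  # hypothetical work on what is still invalid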
Example #10
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependents=False,
                    partition_size_hint=sys.maxint):
        """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets: The targets to check for changes.

    only_buildfiles: If True, then only the target's BUILD files are checked for changes,
                     not its sources.

    invalidate_dependents: If True then any targets depending on changed targets are invalidated.

    partition_size_hint: Each VersionedTargetSet in the yielded list will represent targets
                         containing roughly this number of source files, if possible. Set to
                         sys.maxint for a single VersionedTargetSet. Set to 0 for one
                         VersionedTargetSet per target. It is up to the caller to do the right
                         thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated, that must be done manually.
    """
        extra_data = []
        extra_data.append(self.invalidate_for())

        for f in sorted(self.invalidate_for_files()):
            sha = hashlib.sha1()
            with open(f, "rb") as fd:
                sha.update(fd.read())
            extra_data.append(sha.hexdigest())

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir,
                                     invalidate_dependents,
                                     extra_data,
                                     only_externaldeps=only_buildfiles)

        initial_invalidation_check = cache_manager.check(
            targets, partition_size_hint)

        # See if we have entire partitions cached.
        partitions_to_check = \
          [vt for vt in initial_invalidation_check.all_vts_partitioned if not vt.valid]
        cached_partitions, uncached_partitions = self.check_artifact_cache(
            partitions_to_check)

        # See if we have any individual targets from the uncached partitions.
        vts_to_check = [
            vt for vt in itertools.chain.from_iterable(
                [x.versioned_targets for x in uncached_partitions])
            if not vt.valid
        ]
        cached_targets, uncached_targets = self.check_artifact_cache(
            vts_to_check)

        # Now that we've checked the cache, re-partition whatever is still invalid.
        invalidation_check = \
          InvalidationCheck(initial_invalidation_check.all_vts, uncached_targets, partition_size_hint)

        # Do some reporting.
        num_invalid_partitions = len(
            invalidation_check.invalid_vts_partitioned)
        num_invalid_targets = 0
        num_invalid_sources = 0
        for vt in invalidation_check.invalid_vts:
            if not vt.valid:
                num_invalid_targets += len(vt.targets)
                num_invalid_sources += vt.cache_key.num_sources
        if num_invalid_partitions > 0:
            self.context.log.info('Operating on %d files in %d invalidated targets in %d ' \
                                  'target partitions' % \
                                  (num_invalid_sources, num_invalid_targets, num_invalid_partitions))

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check
        if not self.dry_run:
            for vt in invalidation_check.invalid_vts:
                vt.update()  # In case the caller doesn't update.
Example #11
  def __init__(self, tmpdir):
    CacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir,
                          True, None, False)
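Examples #6 and #11 show only a constructor, which reads like a test helper that pins CacheManager's knobs: an AppendingCacheKeyGenerator, a temporary invalidator directory, invalidate_dependents=True, no extra data, and checking sources as well as BUILD files. A sketch of how such a helper might be wrapped and exercised; the class name TestCacheManager and the targets fixture are hypothetical:

import sys
import tempfile

class TestCacheManager(CacheManager):
  # Hypothetical wrapper around the constructor shown in Examples #6 and #11.
  def __init__(self, tmpdir):
    CacheManager.__init__(self, AppendingCacheKeyGenerator(), tmpdir, True, None, False)

tmpdir = tempfile.mkdtemp()
cache_manager = TestCacheManager(tmpdir)
targets = []  # replace with real targets from the build graph under test
invalidation_check = cache_manager.check(targets, sys.maxint)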