Example #1
  def _process_target_partition(self, partition, classpath):
    """Needs invoking only on invalid targets.

    partition - a triple (vts, sources_by_target, analysis_file).
    classpath - a list of classpath entries.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    (vts, sources, analysis_file) = partition

    if not sources:
      self.context.log.warn('Skipping %s compile for targets with no sources:\n  %s'
                            % (self._language, vts.targets))
    else:
      # Do some reporting.
      self.context.log.info(
        'Compiling a partition containing ',
        items_to_report_element(sources, 'source'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'), '.')
      with self.context.new_workunit('compile'):
        # The compiler may delete classfiles, then later exit on a compilation error. Then if the
        # change triggering the error is reverted, we won't rebuild to restore the missing
        # classfiles. So we force-invalidate here, to be on the safe side.
        vts.force_invalidate()
        self.compile(self._args, classpath, sources, self._classes_dir, analysis_file)
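
Note: items_to_report_element is not defined in these examples. Judging by Example #2, where fix_detail_id appends a detail id via e + (_id,), it returns a tuple. A minimal sketch under that assumption (the summary/detail rendering is a guess, not the real implementation):

  def items_to_report_element(items, item_type):
    # Hypothetical sketch: a report element as a (summary, detail) tuple that a
    # reporter can render collapsed, e.g. '3 sources' expanding to the full list.
    items = list(items)
    pluralized = item_type if len(items) == 1 else item_type + 's'
    return ('%d %s' % (len(items), pluralized),
            '\n'.join(str(item) for item in items))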
Example #2
    def render_cache_stats(artifact_cache_stats):
      def fix_detail_id(e, _id):
        return e if isinstance(e, basestring) else e + (_id, )

      msg_elements = []
      for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
        msg_elements.extend([
          cache_name + ' artifact cache: ',
          # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
          fix_detail_id(items_to_report_element(stat.hit_targets, 'hit'), 'cache-hit-details'),
          ', ',
          fix_detail_id(items_to_report_element(stat.miss_targets, 'miss'), 'cache-miss-details'),
          '.'
        ])
      if not msg_elements:
        msg_elements = ['No artifact cache use.']
      return self._render_message(*msg_elements)
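
render_cache_stats only assumes that artifact_cache_stats exposes a stats_per_cache dict whose values carry hit_targets and miss_targets lists. A stand-in container satisfying that contract, consistent with the add_hit/add_miss calls in Example #7 (hypothetical; the real class may differ):

  from collections import namedtuple

  CacheStat = namedtuple('CacheStat', ['hit_targets', 'miss_targets'])

  class ArtifactCacheStats(object):
    def __init__(self):
      self.stats_per_cache = {}  # cache name -> CacheStat

    def add_hit(self, cache_name, target):
      self._stat_for(cache_name).hit_targets.append(target)

    def add_miss(self, cache_name, target):
      self._stat_for(cache_name).miss_targets.append(target)

    def _stat_for(self, cache_name):
      return self.stats_per_cache.setdefault(cache_name, CacheStat([], []))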
Example #3
  def _process_target_partition(self, vts, cp):
    sources_by_target = self._compute_sources_by_target(vts.targets)
    sources = list(itertools.chain.from_iterable(sources_by_target.values()))
    fingerprint = Target.identify(vts.targets)

    if not sources:
      self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' % vts.targets)
    else:
      # Do some reporting.
      self.context.log.info(
        'Operating on a partition containing ',
        items_to_report_element(vts.cache_key.sources, 'source'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'), '.')
      classpath = [jar for conf, jar in cp if conf in self._confs]
      result = self.compile(classpath, sources, fingerprint, self._depfile)
      if result != 0:
        default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
        raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))
    return sources_by_target
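
The error path above maps jmake's exit code through _JMAKE_ERROR_CODES, falling back to a generic message. Neither that table nor _JMAKE_MAIN is shown here; their shape is presumably along these lines (the class name is assumed and the codes are placeholders, not jmake's real values):

  _JMAKE_MAIN = 'com.sun.tools.jmake.Main'  # assumed entry-point class name
  _JMAKE_ERROR_CODES = {
    # Placeholder entries for illustration only.
    1: 'jmake reported a compilation error',
    2: 'jmake was given an invalid command line',
  }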
Example #4
    def _process_target_partition(self, partition, cp):
        """Needs invoking only on invalid targets.

    partition - a triple (vts, sources_by_target, analysis_file).

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        (vts, sources, analysis_file) = partition

        if not sources:
            self.context.log.warn(
                'Skipping scala compile for targets with no sources:\n  %s' %
                vts.targets)
        else:
            # Do some reporting.
            self.context.log.info(
                'Compiling a partition containing ',
                items_to_report_element(sources, 'source'), ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                '.')
            classpath = [entry for conf, entry in cp if conf in self._confs]
            with self.context.new_workunit('compile'):
                # Zinc may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                # TODO: Do we still need this? Zinc has a safe mode now, but it might be very expensive,
                # as it backs up class files.
                vts.force_invalidate()

                # We have to treat our output dir as an upstream element, so zinc can find valid
                # analysis for previous partitions.
                classpath.append(self._classes_dir)
                upstream = {self._classes_dir: self._analysis_file}
                if self._zinc_utils.compile(classpath, sources,
                                            self._classes_dir, analysis_file,
                                            upstream):
                    raise TaskError('Compile failed.')
Example #5
    def _process_target_partition(self, partition, cp):
        """Needs invoking only on invalid targets.

    partition - a triple (vts, sources_by_target, analysis_file).

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        (vts, sources, analysis_file) = partition

        if not sources:
            self.context.log.warn("Skipping scala compile for targets with no sources:\n  %s" % vts.targets)
        else:
            # Do some reporting.
            self.context.log.info(
                "Compiling a partition containing ",
                items_to_report_element(sources, "source"),
                " in ",
                items_to_report_element([t.address.reference() for t in vts.targets], "target"),
                ".",
            )
            classpath = [entry for conf, entry in cp if conf in self._confs]
            with self.context.new_workunit("compile"):
                # Zinc may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                # TODO: Do we still need this? Zinc has a safe mode now, but it might be very expensive,
                # as it backs up class files.
                vts.force_invalidate()

                # We have to treat our output dir as an upstream element, so zinc can find valid
                # analysis for previous partitions.
                classpath.append(self._classes_dir)
                upstream = {self._classes_dir: self._analysis_file}
                if self._zinc_utils.compile(classpath, sources, self._classes_dir, analysis_file, upstream):
                    raise TaskError("Compile failed.")
Example #6
  def _report_targets(self, prefix, targets, suffix):
    self.context.log.info(
      prefix,
      items_to_report_element([t.address.reference() for t in targets], 'target'),
      suffix)
Example #7
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = CacheManager(self._cache_key_generator,
                                 self._build_invalidator_dir,
                                 invalidate_dependents,
                                 extra_data,
                                 only_externaldeps=only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

    if not silent:
      targets = []
      sources = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        sources.extend(vt.cache_key.sources)
      if targets:
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if sources:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(sources, 'source file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
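
Since invalidated yields exactly once and then marks targets up to date, it is meant to be driven as a context manager (the docstring's "work in the block"), presumably via contextlib.contextmanager. A sketch of a subclass call site (the decorator, the hint value, and the per-partition method are assumptions):

  # Hypothetical driver in a Task subclass:
  def execute(self, targets):
    with self.invalidated(targets, invalidate_dependents=True,
                          partition_size_hint=1000) as invalidation_check:
      for vts in invalidation_check.invalid_vts_partitioned:
        self._process_target_partition(vts, self._classpath)  # assumed signature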
Example #8
    def _report_targets(self, prefix, targets, suffix):
        self.context.log.info(
            prefix,
            items_to_report_element([t.address.reference() for t in targets],
                                    'target'), suffix)
Example #9
    def invalidated(self,
                    targets,
                    only_buildfiles=False,
                    invalidate_dependents=False,
                    partition_size_hint=sys.maxint,
                    silent=False):
        """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
        extra_data = [self.invalidate_for()]

        for f in self.invalidate_for_files():
            extra_data.append(hash_file(f))

        cache_manager = CacheManager(self._cache_key_generator,
                                     self._build_invalidator_dir,
                                     invalidate_dependents,
                                     extra_data,
                                     only_externaldeps=only_buildfiles)

        invalidation_check = cache_manager.check(targets, partition_size_hint)

        if (invalidation_check.invalid_vts
                and self.artifact_cache_reads_enabled()):
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                for t in cached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_hit(
                        'default', t)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                for t in uncached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_miss(
                        'default', t)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

        if not silent:
            targets = []
            sources = []
            num_invalid_partitions = len(
                invalidation_check.invalid_vts_partitioned)
            for vt in invalidation_check.invalid_vts_partitioned:
                targets.extend(vt.targets)
                sources.extend(vt.cache_key.sources)
            if targets:
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target')
                ]
                if sources:
                    msg_elements.append(' containing ')
                    msg_elements.append(
                        items_to_report_element(sources, 'source file'))
                if num_invalid_partitions > 1:
                    msg_elements.append(' in %d target partitions' %
                                        num_invalid_partitions)
                msg_elements.append('.')
                self.context.log.info(*msg_elements)

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check
        if not self.dry_run:
            for vt in invalidation_check.invalid_vts:
                vt.update()  # In case the caller doesn't update.
Example #10
    def _process_target_partition(self, vts, cp, upstream_analysis_map):
        """Must run on all target partitions, not just invalid ones.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        artifacts = [
            self._artifact_factory.artifact_for_target(target)
            for target in vts.targets
        ]
        merged_artifact = self._artifact_factory.merged_artifact(artifacts)

        if not merged_artifact.sources:
            self.context.log.warn(
                'Skipping scala compile for targets with no sources:\n  %s' %
                merged_artifact.targets)
        else:
            # Get anything we have from previous builds (or we pulled from the artifact cache).
            # We must do this even if we're not going to compile, because the merged output dir
            # will go on the classpath of downstream tasks. We can't put the per-target dirs
            # on the classpath because Zinc doesn't handle large numbers of upstream deps well.
            current_state = merged_artifact.merge(force=not vts.valid)

            # Note: vts.valid tells us if the merged artifact is valid. If not, we recreate it
            # above. [not vt.valid for vt in vts.versioned_targets] tells us if anything needs
            # to be recompiled. The distinction is important: all the underlying targets may be
            # valid because they were built in some other pants run with different partitions,
            # but this partition may still be invalid and need merging.

            # Invoke the compiler if needed.
            if any(not vt.valid for vt in vts.versioned_targets):
                # Do some reporting.
                self.context.log.info(
                    'Operating on a partition containing ',
                    items_to_report_element(vts.cache_key.sources, 'source'),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), '.')
                old_state = current_state
                classpath = [
                    entry for conf, entry in cp if conf in self._confs
                ]
                with self.context.new_workunit('compile'):
                    # Zinc may delete classfiles, then later exit on a compilation error. Then if the
                    # change triggering the error is reverted, we won't rebuild to restore the missing
                    # classfiles. So we force-invalidate here, to be on the safe side.
                    vts.force_invalidate()
                    if self._zinc_utils.compile(classpath,
                                                merged_artifact.sources,
                                                merged_artifact.classes_dir,
                                                merged_artifact.analysis_file,
                                                upstream_analysis_map):
                        raise TaskError('Compile failed.')

                write_to_artifact_cache = self._artifact_cache and \
                                          self.context.options.write_to_artifact_cache
                current_state = merged_artifact.split(
                    old_state, portable=write_to_artifact_cache)

                if write_to_artifact_cache:
                    # Write the entire merged artifact, and each individual split artifact,
                    # to the artifact cache, if needed.
                    vts_artifact_pairs = (zip(vts.versioned_targets, artifacts) +
                                          [(vts, merged_artifact)])
                    self._update_artifact_cache(vts_artifact_pairs)

            # Register the products, if needed. TODO: Make sure this is safe to call concurrently.
            # In practice the GIL will make it fine, but relying on that is unsound.
            if self.context.products.isrequired('classes'):
                self._add_products_to_genmap(merged_artifact, current_state)
        return merged_artifact
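
The merge/split choreography is the subtle part of Example #10: merge unconditionally (downstream classpaths need the merged dir even when nothing recompiles), compile only if some underlying target is invalid, then split back out per target so artifacts can be cached individually. Reduced to a skeleton, with the compile and registration steps as stand-in calls:

    current_state = merged_artifact.merge(force=not vts.valid)   # 1. always merge
    if any(not vt.valid for vt in vts.versioned_targets):
        old_state = current_state
        compile_partition(merged_artifact)                       # 2. stand-in for the zinc call
        current_state = merged_artifact.split(
            old_state, portable=write_to_artifact_cache)         # 3. split back out per target
    register_products(merged_artifact, current_state)            # 4. register regardless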
Example #11
    def _process_target_partition(self, vts, cp, upstream_analysis_map):
        """Must run on all target partitions, not just invalid ones.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        artifacts = [self._artifact_factory.artifact_for_target(target) for target in vts.targets]
        merged_artifact = self._artifact_factory.merged_artifact(artifacts)

        if not merged_artifact.sources:
            self.context.log.warn("Skipping scala compile for targets with no sources:\n  %s" % merged_artifact.targets)
        else:
            # Get anything we have from previous builds (or we pulled from the artifact cache).
            # We must do this even if we're not going to compile, because the merged output dir
            # will go on the classpath of downstream tasks. We can't put the per-target dirs
            # on the classpath because Zinc doesn't handle large numbers of upstream deps well.
            current_state = merged_artifact.merge(force=not vts.valid)

            # Note: vts.valid tells us if the merged artifact is valid. If not, we recreate it
            # above. [not vt.valid for vt in vts.versioned_targets] tells us if anything needs
            # to be recompiled. The distinction is important: all the underlying targets may be
            # valid because they were built in some other pants run with different partitions,
            # but this partition may still be invalid and need merging.

            # Invoke the compiler if needed.
            if any(not vt.valid for vt in vts.versioned_targets):
                # Do some reporting.
                self.context.log.info(
                    "Operating on a partition containing ",
                    items_to_report_element(vts.cache_key.sources, "source"),
                    " in ",
                    items_to_report_element([t.address.reference() for t in vts.targets], "target"),
                    ".",
                )
                old_state = current_state
                classpath = [entry for conf, entry in cp if conf in self._confs]
                with self.context.new_workunit("compile"):
                    # Zinc may delete classfiles, then later exit on a compilation error. Then if the
                    # change triggering the error is reverted, we won't rebuild to restore the missing
                    # classfiles. So we force-invalidate here, to be on the safe side.
                    vts.force_invalidate()
                    if self._zinc_utils.compile(
                        classpath,
                        merged_artifact.sources,
                        merged_artifact.classes_dir,
                        merged_artifact.analysis_file,
                        upstream_analysis_map,
                    ):
                        raise TaskError("Compile failed.")

                write_to_artifact_cache = self._artifact_cache and self.context.options.write_to_artifact_cache
                current_state = merged_artifact.split(old_state, portable=write_to_artifact_cache)

                if write_to_artifact_cache:
                    # Write the entire merged artifact, and each individual split artifact,
                    # to the artifact cache, if needed.
                    vts_artifact_pairs = zip(vts.versioned_targets, artifacts) + [(vts, merged_artifact)]
                    self._update_artifact_cache(vts_artifact_pairs)

            # Register the products, if needed. TODO: Make sure this is safe to call concurrently.
            # In practice the GIL will make it fine, but relying on that is unsound.
            if self.context.products.isrequired("classes"):
                self._add_products_to_genmap(merged_artifact, current_state)
        return merged_artifact