Example #1
    def _process_target_partition(self, partition, classpath):
        """Needs invoking only on invalid targets.

    partition - a triple (vts, sources_by_target, analysis_file).
    classpath - a list of classpath entries.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        (vts, sources, analysis_file) = partition

        if not sources:
            self.context.log.warn(
                "Skipping %s compile for targets with no sources:\n  %s" % (self._language, vts.targets)
            )
        else:
            # Do some reporting.
            self.context.log.info(
                "Compiling a partition containing ",
                items_to_report_element(sources, "source"),
                " in ",
                items_to_report_element([t.address.reference() for t in vts.targets], "target"),
                ".",
            )
            with self.context.new_workunit("compile"):
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                self.compile(self._args, classpath, sources, self._classes_dir, analysis_file)
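Every example on this page passes a collection through items_to_report_element before handing it to the logger. The helper comes from Pants' reporting utilities; the code below is a hypothetical stand-in inferred from these call sites, not the real implementation. The assumed contract (which is what lets Examples #13, #14 and #20 append a detail id to the result) is: a plain "N <item_type>(s)" string when there is nothing to expand, otherwise a (summary, detail) tuple.

# Hypothetical stand-in for items_to_report_element, inferred from the call sites
# on this page; the real helper lives in Pants' reporting utilities.
def items_to_report_element(items, item_type):
    """Summarize `items` as a report element.

    Assumed contract: a bare "N <item_type>(s)" string when there is no detail
    to expand, otherwise a (summary, detail) tuple that callers may extend.
    """
    items = list(items)
    summary = '{} {}{}'.format(len(items), item_type, '' if len(items) == 1 else 's')
    if not items:
        return summary
    return summary, '\n'.join(str(item) for item in items)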
Example #2
    def _process_target_partition(self, partition, classpath):
        """Needs invoking only on invalid targets.

    partition - a triple (vts, sources_by_target, analysis_file).
    classpath - a list of classpath entries.

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        (vts, sources, analysis_file) = partition

        if not sources:
            self.context.log.warn(
                'Skipping %s compile for targets with no sources:\n  %s' %
                (self._language, vts.targets))
        else:
            # Do some reporting.
            self.context.log.info(
                'Compiling a partition containing ',
                items_to_report_element(sources, 'source'), ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                '.')
            with self.context.new_workunit('compile'):
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                self.compile(self._args, classpath, sources, self._classes_dir,
                             analysis_file)
Example #3
  def _compile_vts(self, vts, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, progress_message, settings):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      # Do some reporting.
      self.context.log.info(
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]):
        # The compiler may delete classfiles, then later exit on a compilation error. Then if the
        # change triggering the error is reverted, we won't rebuild to restore the missing
        # classfiles. So we force-invalidate here, to be on the safe side.
        vts.force_invalidate()
        self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                     log_file, settings)
Example #4
    def _compile_vts(self, vts, target, sources, analysis_file,
                     upstream_analysis, classpath, outdir, log_file,
                     progress_message, settings, fatal_warnings,
                     zinc_file_manager, counter):
        """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not sources:
            self.context.log.warn(
                'Skipping {} compile for targets with no sources:\n  {}'.
                format(self.name(), vts.targets))
        else:
            counter_val = str(counter()).rjust(counter.format_length(), b' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            # Do some reporting.
            self.context.log.info(
                counter_str, 'Compiling ',
                items_to_report_element(sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', progress_message, ').')
            with self.context.new_workunit('compile',
                                           labels=[WorkUnitLabel.COMPILER
                                                   ]) as compile_workunit:
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                if self.get_options().capture_classpath:
                    self._record_compile_classpath(classpath, vts.targets,
                                                   outdir)

                try:
                    self.compile(self._args, classpath, sources, outdir,
                                 upstream_analysis, analysis_file, log_file,
                                 settings, fatal_warnings, zinc_file_manager,
                                 self._get_plugin_map('javac', target),
                                 self._get_plugin_map('scalac', target))
                except TaskError:
                    if self.get_options().suggest_missing_deps:
                        logs = self._find_failed_compile_logs(compile_workunit)
                        if logs:
                            self._find_missing_deps(
                                '\n'.join([
                                    read_file(log).decode('utf-8')
                                    for log in logs
                                ]), target)
                    raise
Example #5
    def _compile_vts(self, vts, ctx, upstream_analysis, dependency_classpath,
                     progress_message, settings, compiler_option_sets,
                     zinc_file_manager, counter):
        """Compiles sources for the given vts into the given output dir.

    :param vts: VersionedTargetSet with one entry for the target.
    :param ctx: - A CompileContext instance for the target.
    :param dependency_classpath: A list of classpath entries of type ClasspathEntry for dependencies

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not ctx.sources:
            self.context.log.warn(
                'Skipping {} compile for targets with no sources:\n  {}'.
                format(self.name(), vts.targets))
        else:
            counter_val = str(counter()).rjust(counter.format_length(), ' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            # Do some reporting.
            self.context.log.info(
                counter_str, 'Compiling ',
                items_to_report_element(ctx.sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', progress_message, ').')
            with self.context.new_workunit('compile',
                                           labels=[WorkUnitLabel.COMPILER
                                                   ]) as compile_workunit:
                try:
                    directory_digest = self.compile(
                        ctx,
                        self._args,
                        dependency_classpath,
                        upstream_analysis,
                        settings,
                        compiler_option_sets,
                        zinc_file_manager,
                        self._get_plugin_map('javac', Java.global_instance(),
                                             ctx.target),
                        self._get_plugin_map('scalac',
                                             ScalaPlatform.global_instance(),
                                             ctx.target),
                    )
                    self._capture_logs(compile_workunit, ctx.log_dir)
                    return directory_digest
                except TaskError:
                    if self.get_options().suggest_missing_deps:
                        logs = [
                            path for _, name, _, path in self._find_logs(
                                compile_workunit) if name == self.name()
                        ]
                        if logs:
                            self._find_missing_deps(logs, ctx.target)
                    raise
Example #6
  def _compile_vts(self, vts, ctx, upstream_analysis, classpath, progress_message, settings, fatal_warnings,
                   zinc_file_manager, counter):
    """Compiles sources for the given vts into the given output dir.

    :param vts: VersionedTargetSet with one entry for the target.
    :param ctx: - A CompileContext instance for the target.
    :param classpath: A list of classpath entries

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not ctx.sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      counter_val = str(counter()).rjust(counter.format_length(), b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(ctx.sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, ctx.classes_dir)

        try:
          self.compile(
            ctx,
            self._args,
            classpath,
            upstream_analysis,
            settings,
            fatal_warnings,
            zinc_file_manager,
            self._get_plugin_map('javac', self._zinc.javac_compiler_plugins_src(self), ctx.target),
            self._get_plugin_map('scalac', self._zinc.scalac_compiler_plugins_src(self), ctx.target),
          )
          self._capture_logs(compile_workunit, ctx.log_dir)
        except TaskError:
          if self.get_options().suggest_missing_deps:
            logs = [path
                    for _, name, _, path in self._find_logs(compile_workunit)
                    if name == self.name()]
            if logs:
              self._find_missing_deps(logs, ctx.target)
          raise
Example #7
  def _compile_vts(self, vts, target, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, progress_message, settings, fatal_warnings, zinc_file_manager,
                   counter):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      counter_val = str(counter()).rjust(counter.format_length(), b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        # The compiler may delete classfiles, then later exit on a compilation error. Then if the
        # change triggering the error is reverted, we won't rebuild to restore the missing
        # classfiles. So we force-invalidate here, to be on the safe side.
        vts.force_invalidate()
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, outdir)

        # If compiling a plugin, don't try to use it on itself.
        javac_plugins_to_exclude = (t.plugin for t in vts.targets if isinstance(t, JavacPlugin))
        try:
          self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                       log_file, settings, fatal_warnings, zinc_file_manager,
                       javac_plugins_to_exclude)
        except TaskError:
          if self.get_options().suggest_missing_deps:
            logs = self._find_failed_compile_logs(compile_workunit)
            if logs:
              self._find_missing_deps('\n'.join([read_file(log) for log in logs]), target)
          raise
Example #8
    def _compile_vts(self, vts, sources, analysis_file, upstream_analysis,
                     classpath, outdir, log_file, progress_message, settings,
                     fatal_warnings, counter):
        """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not sources:
            self.context.log.warn(
                'Skipping {} compile for targets with no sources:\n  {}'.
                format(self.name(), vts.targets))
        else:
            counter_val = str(counter()).rjust(counter.format_length(), b' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            # Do some reporting.
            self.context.log.info(
                counter_str, 'Compiling ',
                items_to_report_element(sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', progress_message, ').')
            with self.context.new_workunit('compile',
                                           labels=[WorkUnitLabel.COMPILER]):
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                if self.get_options().capture_classpath:
                    self._record_compile_classpath(classpath, vts.targets,
                                                   outdir)

                # If compiling a plugin, don't try to use it on itself.
                javac_plugins_to_exclude = (t.plugin for t in vts.targets
                                            if isinstance(t, JavacPlugin))
                self.compile(self._args, classpath, sources, outdir,
                             upstream_analysis, analysis_file, log_file,
                             settings, fatal_warnings,
                             javac_plugins_to_exclude)
Example #9
File: task.py  Project: wonlay/pants
 def _report_targets(self, prefix, targets, suffix, logger=None):
   logger = logger or self.context.log.info
   logger(
     prefix,
     items_to_report_element([t.address.reference() for t in targets], 'target'),
     suffix,
   )
Example #10
File: task.py  Project: neven7/pants
 def _report_targets(self, prefix, targets, suffix, logger=None):
   logger = logger or self.context.log.info
   logger(
     prefix,
     items_to_report_element([t.address.reference() for t in targets], 'target'),
     suffix,
   )
Example #11
  def _compile_vts(self, vts, target, sources, analysis_file, upstream_analysis, classpath, outdir,
                   log_file, zinc_args_file, progress_message, settings, fatal_warnings,
                   zinc_file_manager, counter):
    """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
    if not sources:
      self.context.log.warn('Skipping {} compile for targets with no sources:\n  {}'
                            .format(self.name(), vts.targets))
    else:
      counter_val = str(counter()).rjust(counter.format_length(), b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      # Do some reporting.
      self.context.log.info(
        counter_str,
        'Compiling ',
        items_to_report_element(sources, '{} source'.format(self.name())),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        progress_message,
        ').')
      with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
        if self.get_options().capture_classpath:
          self._record_compile_classpath(classpath, vts.targets, outdir)

        try:
          self.compile(self._args, classpath, sources, outdir, upstream_analysis, analysis_file,
                       log_file, zinc_args_file, settings, fatal_warnings, zinc_file_manager,
                       self._get_plugin_map('javac', target),
                       self._get_plugin_map('scalac', target))
        except TaskError:
          if self.get_options().suggest_missing_deps:
            logs = self._find_failed_compile_logs(compile_workunit)
            if logs:
              self._find_missing_deps('\n'.join([read_file(log).decode('utf-8') for log in logs]), target)
          raise
Example #12
    def work_for_vts_rsc_jdk():
      distribution = self._get_jvm_distribution()
      jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
      self._jvm_lib_jars_abs = jvm_lib_jars_abs

      metacp_inputs = tuple(jvm_lib_jars_abs)

      counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(metacp_inputs, 'jar'),
        ' in the jdk')

      # NB: Metacp doesn't handle the existence of possibly stale semanticdb jars,
      # so we explicitly clean the directory to keep it happy.
      safe_mkdir(index_dir, clean=True)

      with Timer() as timer:
        # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        rsc_index_dir = fast_relpath(index_dir, get_buildroot())
        args = [
          '--verbose',
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(metacp_inputs),
        ]
        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=target,
          input_files=tuple(
            # NB: no input files because the jdk is expected to exist on the system in a known
            #     location.
            #     Related: https://github.com/pantsbuild/pants/issues/6416
          ),
          output_dir=rsc_index_dir)
        metacp_stdout = stdout_contents(metacp_wu)
        metacp_result = json.loads(metacp_stdout)

        metai_classpath = self._collect_metai_classpath(metacp_result, jvm_lib_jars_abs)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt=target)

        self._jvm_lib_metacp_classpath = [os.path.join(get_buildroot(), x) for x in metai_classpath]

      self._record_target_stats(target,
        len(self._jvm_lib_metacp_classpath),
        len([]),
        timer.elapsed,
        False,
        'metacp'
      )
Example #13
    def render_cache_stats(artifact_cache_stats):
      def fix_detail_id(e, _id):
        return e if isinstance(e, string_types) else e + (_id, )

      msg_elements = []
      for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
        msg_elements.extend([
          cache_name + ' artifact cache: ',
          # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
          fix_detail_id(items_to_report_element(stat.hit_targets, 'hit'), 'cache-hit-details'),
          ', ',
          fix_detail_id(items_to_report_element(stat.miss_targets, 'miss'), 'cache-miss-details'),
          '.'
        ])
      if not msg_elements:
        msg_elements = ['No artifact cache use.']
      return self._render_message(*msg_elements)
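fix_detail_id above relies on the report element being either a plain string or a tuple that tolerates one extra field. A hedged illustration of that pattern (the target specs and the detail id here are made up for the example):

# Illustration only: assumes a non-empty list yields a tuple, so appending an id
# produces e.g. ('2 hits', '<detail text>', 'cache-hit-details').
element = items_to_report_element(['//src/java:lib', '//src/scala:lib'], 'hit')
pinned = element if isinstance(element, str) else element + ('cache-hit-details',)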
Example #14
    def render_cache_stats(artifact_cache_stats):
      def fix_detail_id(e, _id):
        return e if isinstance(e, basestring) else e + (_id, )

      msg_elements = []
      for cache_name, stat in artifact_cache_stats.stats_per_cache.items():
        msg_elements.extend([
          cache_name + ' artifact cache: ',
          # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
          fix_detail_id(items_to_report_element(stat.hit_targets, 'hit'), 'cache-hit-details'),
          ', ',
          fix_detail_id(items_to_report_element(stat.miss_targets, 'miss'), 'cache-miss-details'),
          '.'
        ])
      if not msg_elements:
        msg_elements = ['No artifact cache use.']
      return self._render_message(*msg_elements)
Example #15
 def _report_targets(self, prefix, targets, suffix, logger=None):
   target_address_references = [t.address.reference() for t in targets]
   msg_elements = [
     prefix,
     items_to_report_element(target_address_references, 'target'),
     suffix,
   ]
   logger = logger or self.context.log.info
   logger(*msg_elements)
Example #16
  def _register_vts(self, compile_contexts):
    classes_by_source = self.context.products.get_data('classes_by_source')
    classes_by_target = self.context.products.get_data('classes_by_target')
    compile_classpath = self.context.products.get_data('compile_classpath')
    resources_by_target = self.context.products.get_data('resources_by_target')

    # Register class products (and resources generated by annotation processors.)
    computed_classes_by_source_by_context = self._strategy.compute_classes_by_source(
        compile_contexts)
    resource_mapping = self._strategy.compute_resource_mapping(compile_contexts)
    for compile_context in compile_contexts:
      computed_classes_by_source = computed_classes_by_source_by_context[compile_context]
      target = compile_context.target
      classes_dir = compile_context.classes_dir

      def add_products_by_target(files):
        for f in files:
          clsname = self._strategy.class_name_for_class_file(compile_context, f)
          if clsname:
            # Is a class.
            classes_by_target[target].add_abs_paths(classes_dir, [f])
            resources = resource_mapping.get(clsname, [])
            resources_by_target[target].add_abs_paths(classes_dir, resources)
          else:
            # Is a resource.
            resources_by_target[target].add_abs_paths(classes_dir, [f])

      # Collect classfiles (absolute) that were claimed by sources (relative)
      for source in compile_context.sources:
        classes = computed_classes_by_source.get(source, [])
        add_products_by_target(classes)
        if classes_by_source is not None:
          classes_by_source[source].add_abs_paths(classes_dir, classes)

      # And any that were not claimed by sources (NB: `None` map key.)
      unclaimed_classes = computed_classes_by_source.get(None, [])
      if unclaimed_classes:
        self.context.log.debug(
          items_to_report_element(unclaimed_classes, 'class'),
          ' not claimed by analysis for ',
          str(compile_context.target)
        )
        add_products_by_target(unclaimed_classes)

    # Register resource products.
    for compile_context in compile_contexts:
      extra_resources = self.extra_products(compile_context.target)
      # Add to resources_by_target (if it was requested).
      if resources_by_target is not None:
        target_resources = resources_by_target[compile_context.target]
        for root, abs_paths in extra_resources:
          target_resources.add_abs_paths(root, abs_paths)
      # And to the compile_classpath, to make them available within the next round.
      # TODO(stuhood): This is redundant with resources_by_target, but resources_by_target
      # are not available during compilation. https://github.com/pantsbuild/pants/issues/206
      entries = [(conf, root) for conf in self._confs for root, _ in extra_resources]
      compile_classpath.add_for_target(compile_context.target, entries)
Example #17
File: task.py  Project: rkstap/pants
 def _report_targets(self, prefix, targets, suffix, logger=None):
   target_address_references = [t.address.reference() for t in targets]
   msg_elements = [
     prefix,
     items_to_report_element(target_address_references, 'target'),
     suffix,
   ]
   logger = logger or self.context.log.info
   logger(*msg_elements)
Example #18
    def _compile_vts(self, vts, sources, analysis_file, upstream_analysis,
                     classpath, outdir, log_file, progress_message, settings):
        """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not sources:
            self.context.log.warn(
                'Skipping {} compile for targets with no sources:\n  {}'.
                format(self.name(), vts.targets))
        else:
            # Do some reporting.
            self.context.log.info(
                'Compiling ',
                items_to_report_element(sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', progress_message, ').')
            with self.context.new_workunit('compile',
                                           labels=[WorkUnitLabel.COMPILER]):
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                self.compile(self._args, classpath, sources, outdir,
                             upstream_analysis, analysis_file, log_file,
                             settings)
Example #19
    def _check_cache_before_work(self,
                                 work_str,
                                 vts,
                                 ctx,
                                 counter,
                                 debug=False,
                                 work_fn=lambda: None):
        hit_cache = self.check_cache(vts, counter)

        if not hit_cache:
            counter_val = str(counter()).rjust(counter.format_length(), ' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            log_fn = self.context.log.debug if debug else self.context.log.info
            log_fn(
                counter_str, f'{work_str} ',
                items_to_report_element(ctx.sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', ctx.target.address.spec, ').')

            work_fn()
Example #20
        def render_cache_stats(artifact_cache_stats):
            def fix_detail_id(e, _id):
                return e if isinstance(e, str) else e + (_id, )

            msg_elements = []
            for cache_name, stat in artifact_cache_stats.stats_per_cache.items(
            ):
                # TODO consider display causes for hit/miss targets
                hit_targets = [tgt for tgt, cause in stat.hit_targets]
                miss_targets = [tgt for tgt, cause in stat.miss_targets]
                msg_elements.extend([
                    cache_name + " artifact cache: ",
                    # Explicitly set the detail ids, so their displayed/hidden state survives a refresh.
                    fix_detail_id(items_to_report_element(hit_targets, "hit"),
                                  "cache-hit-details"),
                    ", ",
                    fix_detail_id(
                        items_to_report_element(miss_targets, "miss"),
                        "cache-miss-details"),
                    ".",
                ])
            if not msg_elements:
                msg_elements = ["No artifact cache use."]
            return self._render_message(*msg_elements)
Example #21
File: task.py  Project: rkstap/pants
  def invalidated(self,
                  targets,
                  invalidate_dependents=False,
                  silent=False,
                  fingerprint_strategy=None,
                  topological_order=False):
    """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If true, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per task,
                                finer grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """
    invalidation_check = self._do_invalidation_check(fingerprint_strategy,
                                                     invalidate_dependents,
                                                     targets,
                                                     topological_order)

    self._maybe_create_results_dirs(invalidation_check.all_vts)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts, uncached_causes = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        self.context.run_tracker.artifact_cache_stats.add_hits(self._task_name, cached_targets)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        self.context.run_tracker.artifact_cache_stats.add_misses(self._task_name,
                                                                 uncached_targets,
                                                                 uncached_causes)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = InvalidationCheck(invalidation_check.all_vts, uncached_vts)

    if not silent:
      targets = []
      for vt in invalidation_check.invalid_vts:
        targets.extend(vt.targets)

      if len(targets):
        target_address_references = [t.address.reference() for t in targets]
        msg_elements = [
          'Invalidated ',
          items_to_report_element(target_address_references, 'target'),
          '.',
        ]
        self.context.log.info(*msg_elements)

    self._update_invalidation_report(invalidation_check, 'pre-check')

    # Cache has been checked to create the full list of invalid VTs.
    # Only copy previous_results for this subset of VTs.
    if self.incremental:
      for vts in invalidation_check.invalid_vts:
        vts.copy_previous_results()

    # This may seem odd: why would we need to invalidate a VersionedTargetSet that is already
    # invalid?  But the name force_invalidate() is slightly misleading in this context - what it
    # actually does is delete the key file created at the end of the last successful task run.
    # This is necessary to avoid the following scenario:
    #
    # 1) In state A: Task succeeds and writes some output.  Key is recorded by the invalidator.
    # 2) In state B: Task fails, but writes some output.  Key is not recorded.
    # 3) After reverting back to state A: The current key is the same as the one recorded at the
    #    end of step 1), so it looks like no work needs to be done, but actually the task
    #    must re-run, to overwrite the output written in step 2.
    #
    # Deleting the file ensures that if a task fails, there is no key for which we might think
    # we're in a valid state.
    for vts in invalidation_check.invalid_vts:
      vts.force_invalidate()

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check

    self._update_invalidation_report(invalidation_check, 'post-check')

    for vt in invalidation_check.invalid_vts:
      vt.update()

    # Background work to clean up previous builds.
    if self.context.options.for_global_scope().workdir_max_build_entries is not None:
      self._launch_background_workdir_cleanup(invalidation_check.all_vts)
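Because invalidated() yields an InvalidationCheck partway through (note the `yield invalidation_check` above), subclasses consume it as a context manager. A minimal usage sketch, with the per-target work left as a placeholder:

# Sketch of a typical caller; _do_work_for is a placeholder, not a Pants API.
def execute(self):
    with self.invalidated(self.context.targets(), invalidate_dependents=True) as invalidation_check:
        for vt in invalidation_check.invalid_vts:
            self._do_work_for(vt.target)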
Example #22
File: task.py  Project: Medium/pants
    def invalidated(
        self, targets, invalidate_dependents=False, silent=False, fingerprint_strategy=None, topological_order=False
    ):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets:               The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    :param fingerprint_strategy:   A FingerprintStrategy instance, which can do per task, finer grained
                                  fingerprinting of a given Target.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

        fingerprint_strategy = fingerprint_strategy or TaskIdentityFingerprintStrategy(self)
        cache_manager = self.create_cache_manager(invalidate_dependents, fingerprint_strategy=fingerprint_strategy)

        invalidation_check = cache_manager.check(targets, topological_order=topological_order)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit("cache"):
                cached_vts, uncached_vts, uncached_causes = self.check_artifact_cache(
                    self.check_artifact_cache_for(invalidation_check)
                )
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                self.context.run_tracker.artifact_cache_stats.add_hits(cache_manager.task_name, cached_targets)
                if not silent:
                    self._report_targets("Using cached artifacts for ", cached_targets, ".")
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                self.context.run_tracker.artifact_cache_stats.add_misses(
                    cache_manager.task_name, uncached_targets, uncached_causes
                )
                if not silent:
                    self._report_targets("No cached artifacts for ", uncached_targets, ".")
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = InvalidationCheck(invalidation_check.all_vts, uncached_vts)

        if not silent:
            targets = []
            for vt in invalidation_check.invalid_vts:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    "Invalidated ",
                    items_to_report_element([t.address.reference() for t in targets], "target"),
                ]
                msg_elements.append(".")
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid, phase="pre-check")

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid, phase="post-check")

        for vt in invalidation_check.invalid_vts:
            vt.update()

        # Background work to clean up previous builds.
        if self.context.options.for_global_scope().workdir_max_build_entries is not None:
            self._launch_background_workdir_cleanup(invalidation_check.all_vts)
Example #23
File: task.py  Project: cheister/pants
  def invalidated(self,
                  targets,
                  invalidate_dependents=False,
                  silent=False,
                  fingerprint_strategy=None,
                  topological_order=False,
                  use_cache=True):
    """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :param targets:               The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    :param fingerprint_strategy:   A FingerprintStrategy instance, which can do per task, finer grained
                                  fingerprinting of a given Target.
    :param use_cache:             A boolean to indicate whether to read/write the cache within this
                                  invalidate call. In order for the cache to be used, both the task
                                  settings and this parameter must agree that they should be used.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

    fingerprint_strategy = fingerprint_strategy or TaskIdentityFingerprintStrategy(self)
    cache_manager = self.create_cache_manager(invalidate_dependents,
                                              fingerprint_strategy=fingerprint_strategy)

    invalidation_check = cache_manager.check(targets, topological_order=topological_order)

    if invalidation_check.invalid_vts and use_cache and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts, uncached_causes = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        self.context.run_tracker.artifact_cache_stats.add_hits(cache_manager.task_name,
                                                               cached_targets)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        self.context.run_tracker.artifact_cache_stats.add_misses(cache_manager.task_name,
                                                                 uncached_targets,
                                                                 uncached_causes)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts)

    self._maybe_create_results_dirs(invalidation_check.all_vts)

    if not silent:
      targets = []
      for vt in invalidation_check.invalid_vts:
        targets.extend(vt.targets)

      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    invalidation_report = self.context.invalidation_report
    if invalidation_report:
      for vts in invalidation_check.all_vts:
        invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid,
                                    phase='pre-check')

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check

    if invalidation_report:
      for vts in invalidation_check.all_vts:
        invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid,
                                    phase='post-check')
    for vt in invalidation_check.invalid_vts:
      vt.update()  # In case the caller doesn't update.

    # Background work to clean up previous builds.
    if self.context.options.for_global_scope().workdir_max_build_entries is not None:
      self._launch_background_workdir_cleanup(invalidation_check.all_vts)

    write_to_cache = (self.cache_target_dirs
                      and use_cache
                      and self.artifact_cache_writes_enabled()
                      and invalidation_check.invalid_vts)
    if write_to_cache:
      pairs = []
      for vt in invalidation_check.invalid_vts:
        if self._should_cache(vt):
          pairs.append((vt, [vt.results_dir]))
      self.update_artifact_cache(pairs)
Example #24
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            tgt, = vts.targets

            rsc_cc = compile_contexts[target].rsc_cc

            use_youtline = rsc_cc.workflow == self.JvmCompileWorkflowType.outline_and_zinc
            outliner = 'scalac-outliner' if use_youtline else 'rsc'

            if use_youtline and Semver.parse(
                    self._scala_library_version) < Semver.parse("2.12.9"):
                raise RuntimeError(
                    f"To use scalac's built-in outlining, scala version must be at least 2.12.9, but got {self._scala_library_version}"
                )

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                action_str = 'Outlining ' if use_youtline else 'Rsc-ing '

                self.context.log.info(
                    counter_str, action_str,
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')
                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance(
                    ).dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_directory_digests = []
                classpath_product = self.context.products.get_data(
                    'rsc_mixed_compile_classpath')
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if self.execution_strategy == self.ExecutionStrategy.hermetic and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of {}".format(classpath_entry,
                                                     outliner))
                    classpath_directory_digests.append(
                        classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest,
                             sources_snapshot.directory_digest) +
                            tuple(classpath_directory_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return ((EMPTY_DIRECTORY_DIGEST), classpath_abs_jdk)

                    (input_digest,
                     classpath_entry_paths) = self.execution_strategy.match({
                         self.ExecutionStrategy.hermetic:
                         hermetic_digest_classpath,
                         self.ExecutionStrategy.subprocess:
                         nonhermetic_digest_classpath,
                         self.ExecutionStrategy.nailgun:
                         nonhermetic_digest_classpath,
                     })()

                    youtline_args = []
                    if use_youtline:
                        youtline_args = [
                            "-Youtline",
                            "-Ystop-after:pickler",
                            "-Ypickle-write",
                            rsc_jar_file_relative_path,
                        ]
                        if not self.get_options().allow_public_inference:
                            wartremover_args = [
                                f"-Xplugin:{self._wartremover_classpath[0]}",
                                "-P:wartremover:traverser:org.wartremover.warts.PublicInference",
                                "-Ycache-plugin-class-loader:last-modified",
                            ]
                            youtline_args = wartremover_args + youtline_args

                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(classpath_entry_paths),
                        '-d',
                        rsc_jar_file_relative_path,
                    ] + self.get_options(
                    ).extra_rsc_args + youtline_args + target_sources

                    self.write_argsfile(ctx, args)

                    self._runtool(distribution, input_digest, ctx,
                                  use_youtline)

                self._record_target_stats(tgt, len(classpath_entry_paths),
                                          len(target_sources), timer.elapsed,
                                          False, outliner)

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
Example #25
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets

      if not hit_cache:
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following
        # - collect jar dependencies and metacp-classpath entries for them
        # - collect the non-java targets and their classpath entries
        # - break out java targets and their javac'd classpath entries
        # metacp
        # - metacp the java targets
        # rsc
        # - combine the metacp outputs for jars, previous scala targets and the java metacp
        #   classpath
        # - run Rsc on the current target with those as dependencies

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        jar_deps = [t for t in dependencies_for_target if isinstance(t, JarLibrary)]

        def is_java_compile_target(t):
          return isinstance(t, JavaLibrary) or t.has_sources('.java')
        java_deps = [t for t in dependencies_for_target
                     if is_java_compile_target(t)]
        non_java_deps = [t for t in dependencies_for_target
                         if not (is_java_compile_target(t)) and not isinstance(t, JarLibrary)]

        metacped_jar_classpath_abs = _paths_from_classpath(
          self._metacp_jars_classpath_product.get_for_targets(jar_deps + java_deps)
        )
        metacped_jar_classpath_abs.extend(self._jvm_lib_metacp_classpath)
        metacped_jar_classpath_rel = fast_relpath_collection(metacped_jar_classpath_abs)

        non_java_paths = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(non_java_deps),
          collection_type=set)
        non_java_rel = fast_relpath_collection(non_java_paths)

        ctx.ensure_output_dirs_exist()

        distribution = self._get_jvm_distribution()
        with Timer() as timer:
          # Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())

          # TODO remove non-rsc entries from non_java_rel in a better way
          rsc_semanticdb_classpath = metacped_jar_classpath_rel + \
                                     [j for j in non_java_rel if 'compile/rsc/' in j]
          target_sources = ctx.sources
          args = [
                   '-cp', os.pathsep.join(rsc_semanticdb_classpath),
                   '-d', rsc_mjar_file,
                 ] + target_sources
          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_semanticdb_classpath),
            input_digest=sources_snapshot.directory_digest,
            output_dir=os.path.dirname(rsc_mjar_file))

        self._record_target_stats(tgt,
          len(rsc_semanticdb_classpath),
          len(target_sources),
          timer.elapsed,
          False,
          'rsc'
        )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
Example #26
    def invalidated(self,
                    targets,
                    invalidate_dependents=False,
                    silent=False,
                    fingerprint_strategy=None,
                    topological_order=False,
                    use_cache=True):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets:               The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    :param fingerprint_strategy:   A FingerprintStrategy instance, which can do per-task, finer-grained
                                  fingerprinting of a given Target.
    :param use_cache:             A boolean to indicate whether to read/write the cache within this
                                  invalidate call. In order for the cache to be used, both the task
                                  settings and this parameter must agree that they should be used.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

        fingerprint_strategy = fingerprint_strategy or TaskIdentityFingerprintStrategy(
            self)
        cache_manager = self.create_cache_manager(
            invalidate_dependents, fingerprint_strategy=fingerprint_strategy)

        invalidation_check = cache_manager.check(
            targets, topological_order=topological_order)

        if invalidation_check.invalid_vts and use_cache and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts, uncached_causes = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                self.context.run_tracker.artifact_cache_stats.add_hits(
                    cache_manager.task_name, cached_targets)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                self.context.run_tracker.artifact_cache_stats.add_misses(
                    cache_manager.task_name, uncached_targets, uncached_causes)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if not silent:
            targets = []
            for vt in invalidation_check.invalid_vts:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target')
                ]
                msg_elements.append('.')
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='pre-check')

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='post-check')
        for vt in invalidation_check.invalid_vts:
            vt.update()  # In case the caller doesn't update.

        # Background work to clean up previous builds.
        if self.context.options.for_global_scope().workdir_max_build_entries is not None:
            self._launch_background_workdir_cleanup(invalidation_check.all_vts)

        write_to_cache = (self.cache_target_dirs and use_cache
                          and self.artifact_cache_writes_enabled()
                          and invalidation_check.invalid_vts)
        if write_to_cache:
            pairs = []
            for vt in invalidation_check.invalid_vts:
                if self._should_cache(vt):
                    pairs.append((vt, [vt.current_results_dir]))
            self.update_artifact_cache(pairs)
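
Note: because invalidated() yields its InvalidationCheck, subclasses drive it as a context manager. A minimal usage sketch inside a Task subclass (the per-target work is elided; do_work_for is a hypothetical callback):

    with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
      for vt in invalidation_check.invalid_vts:
        # Do the real work for each invalid versioned target here; on a clean exit from
        # the block, the build cache is updated for these targets.
        do_work_for(vt)
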
Example #27
    def invalidated(self,
                    targets,
                    invalidate_dependents=False,
                    partition_size_hint=sys.maxint,
                    silent=False,
                    locally_changed_targets=None,
                    fingerprint_strategy=None,
                    topological_order=False):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :param targets:               The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    :param partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                                  containing roughly this number of source files, if possible. Set to
                                  sys.maxint for a single VersionedTargetSet. Set to 0 for one
                                  VersionedTargetSet per target. It is up to the caller to do the right
                                  thing with whatever partitioning it asks for.
    :param locally_changed_targets: Targets that we've edited locally. If specified, and there aren't too
                                  many of them, we keep these in separate partitions from other targets,
                                  as these are more likely to have build errors, and so to be rebuilt over
                                  and over, and partitioning them separately is a performance win.
    :param fingerprint_strategy:   A FingerprintStrategy instance, which can do per-task, finer-grained
                                  fingerprinting of a given Target.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the (partitioned) targets.
    :rtype: InvalidationCheck
    """

        # TODO(benjy): Compute locally_changed_targets here instead of passing it in? We currently pass
        # it in because JvmCompile already has the source->target mapping for other reasons, and also
        # to selectively enable this feature.
        fingerprint_strategy = fingerprint_strategy or TaskIdentityFingerprintStrategy(
            self)
        cache_manager = self.create_cache_manager(
            invalidate_dependents, fingerprint_strategy=fingerprint_strategy)
        # We separate locally-modified targets from others by coloring them differently.
        # This can be a performance win, because these targets are more likely to be iterated
        # over, and this preserves "chunk stability" for them.
        colors = {}

        # But we only do so if there aren't too many, or this optimization will backfire.
        locally_changed_target_limit = 10

        if locally_changed_targets and len(
                locally_changed_targets) < locally_changed_target_limit:
            for t in targets:
                if t in locally_changed_targets:
                    colors[t] = 'locally_changed'
                else:
                    colors[t] = 'not_locally_changed'
        invalidation_check = cache_manager.check(
            targets,
            partition_size_hint,
            colors,
            topological_order=topological_order)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                for t in cached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_hit(
                        'default', t)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                for t in uncached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_miss(
                        'default', t)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint, colors)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if not silent:
            targets = []
            num_invalid_partitions = len(
                invalidation_check.invalid_vts_partitioned)
            for vt in invalidation_check.invalid_vts_partitioned:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target')
                ]
                if num_invalid_partitions > 1:
                    msg_elements.append(' in {} target partitions'.format(
                        num_invalid_partitions))
                msg_elements.append('.')
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='pre-check')

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='post-check')
        for vt in invalidation_check.invalid_vts:
            vt.update()  # In case the caller doesn't update.

        write_to_cache = (self.cache_target_dirs
                          and self.artifact_cache_writes_enabled()
                          and invalidation_check.invalid_vts)
        if write_to_cache:
            pairs = []
            for vt in invalidation_check.invalid_vts:
                if self._should_cache(vt):
                    pairs.append((vt, [vt.results_dir]))
            self.update_artifact_cache(pairs)
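
Note: this partitioned variant is driven the same way, except that callers iterate invalid_vts_partitioned so each yielded VersionedTargetSet respects partition_size_hint. A sketch (compile_partition is a hypothetical callback):

    with self.invalidated(targets, partition_size_hint=100) as invalidation_check:
      for vts in invalidation_check.invalid_vts_partitioned:
        # Each vts groups targets totalling roughly partition_size_hint source files.
        compile_partition(vts)
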
Example #28
        def work_for_vts_rsc(vts, ctx):
            # Double check the cache before beginning compilation
            hit_cache = self.check_cache(vts, counter)
            target = ctx.target
            tgt, = vts.targets

            if not hit_cache:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   b' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                self.context.log.info(
                    counter_str, 'Rsc-ing ',
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')

                # This does the following
                # - collect jar dependencies and metacp-classpath entries for them
                # - collect the non-java targets and their classpath entries
                # - break out java targets and their javac'd classpath entries
                # metacp
                # - metacp the java targets
                # rsc
                # - combine the metacp outputs for jars, previous scala targets and the java metacp
                #   classpath
                # - run Rsc on the current target with those as dependencies

                dependencies_for_target = list(
                    DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

                jar_deps = [
                    t for t in dependencies_for_target
                    if isinstance(t, JarLibrary)
                ]

                def is_java_compile_target(t):
                    return isinstance(t, JavaLibrary) or t.has_sources('.java')

                java_deps = [
                    t for t in dependencies_for_target
                    if is_java_compile_target(t)
                ]
                non_java_deps = [
                    t for t in dependencies_for_target
                    if not (is_java_compile_target(t))
                    and not isinstance(t, JarLibrary)
                ]

                metacped_jar_classpath_abs = _paths_from_classpath(
                    self._metacp_jars_classpath_product.get_for_targets(
                        jar_deps))
                metacped_jar_classpath_abs.extend(
                    self._jvm_lib_metacp_classpath)
                metacped_jar_classpath_rel = fast_relpath_collection(
                    metacped_jar_classpath_abs)

                jar_rsc_classpath_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(jar_deps),
                    collection_type=set)
                jar_rsc_classpath_rel = fast_relpath_collection(
                    jar_rsc_classpath_paths)

                non_java_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(non_java_deps),
                    collection_type=set)
                non_java_rel = fast_relpath_collection(non_java_paths)

                java_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(java_deps),
                    collection_type=set)
                java_rel = fast_relpath_collection(java_paths)

                ctx.ensure_output_dirs_exist()

                distribution = self._get_jvm_distribution()
                with Timer() as timer:
                    # Step 1: Convert classpath to SemanticDB
                    # ---------------------------------------
                    # If any dependencies have not yet been metacp'd, metacp them so their
                    # indices can be passed to Rsc.
                    # TODO move these to their own jobs. https://github.com/pantsbuild/pants/issues/6754

                    # Inputs
                    # - Java dependencies jars
                    metacp_inputs = java_rel

                    # Dependencies
                    # - 3rdparty jars
                    # - non-java, ie scala, dependencies
                    # - jdk
                    snapshotable_metacp_dependencies = list(jar_rsc_classpath_rel) + \
                                          list(non_java_rel) + \
                                          fast_relpath_collection(
                                            _paths_from_classpath(self._extra_compile_time_classpath))
                    metacp_dependencies = snapshotable_metacp_dependencies + self._jvm_lib_jars_abs

                    if metacp_inputs:
                        rsc_index_dir = fast_relpath(ctx.rsc_index_dir,
                                                     get_buildroot())
                        args = [
                            '--verbose',
                            '--stub-broken-signatures',
                            '--dependency-classpath',
                            os.pathsep.join(metacp_dependencies),
                            # NB: The directory to dump the semanticdb jars generated by metacp.
                            '--out',
                            rsc_index_dir,
                            os.pathsep.join(metacp_inputs),
                        ]
                        metacp_wu = self._runtool(
                            'scala.meta.cli.Metacp',
                            'metacp',
                            args,
                            distribution,
                            tgt=tgt,
                            input_files=tuple(
                                metacp_inputs +
                                snapshotable_metacp_dependencies),
                            output_dir=rsc_index_dir)
                        metacp_stdout = stdout_contents(metacp_wu)
                        metacp_result = json.loads(metacp_stdout)

                        metacped_java_dependency_rel = self._collect_metai_classpath(
                            metacp_result, java_rel)

                        # Step 1.5: metai Index the semanticdbs
                        # -------------------------------------
                        self._run_metai_tool(distribution,
                                             metacped_java_dependency_rel,
                                             rsc_index_dir, tgt)
                    else:
                        # NB: there are no unmetacp'd dependencies
                        metacped_java_dependency_rel = []

                    # Step 2: Outline Scala sources into SemanticDB
                    # ---------------------------------------------
                    rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file,
                                                 get_buildroot())

                    # TODO remove non-rsc entries from non_java_rel in a better way
                    rsc_semanticdb_classpath = metacped_java_dependency_rel + \
                                               metacped_jar_classpath_rel + \
                                               [j for j in non_java_rel if 'compile/rsc/' in j]
                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(rsc_semanticdb_classpath),
                        '-d',
                        rsc_mjar_file,
                    ] + target_sources
                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)
                    self._runtool(
                        'rsc.cli.Main',
                        'rsc',
                        args,
                        distribution,
                        tgt=tgt,
                        input_files=tuple(rsc_semanticdb_classpath),
                        input_digest=sources_snapshot.directory_digest,
                        output_dir=os.path.dirname(rsc_mjar_file))

                self._record_target_stats(tgt, len(metacp_inputs),
                                          len(target_sources), timer.elapsed,
                                          False, 'rsc')
                # Write any additional resources for this target to the target workdir.
                self.write_extra_resources(ctx)

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
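
Note: fast_relpath_collection appears throughout these examples but is not shown. The assumed behavior is that it relativizes each path against the Pants build root; a self-contained sketch of that idea (using the current directory where the real helper would use the build root):

    import os

    def fast_relpath_collection(paths, buildroot=None):
      # Make every path relative to the build root so the resulting classpath entries
      # are stable across checkouts. Illustrative only.
      buildroot = buildroot or os.getcwd()
      return [os.path.relpath(path, buildroot) for path in paths]
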
Example #29
File: task.py Project: qiaohaijun/pants
 def _report_targets(self, prefix, targets, suffix):
     self.context.log.info(
         prefix, items_to_report_element([t.address.reference() for t in targets], "target"), suffix
     )
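
Note: the invalidated() examples above call this helper like so:

    self._report_targets('Using cached artifacts for ', cached_targets, '.')
    self._report_targets('No cached artifacts for ', uncached_targets, '.')
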
Example #30
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            tgt, = vts.targets

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                self.context.log.info(
                    counter_str, 'Rsc-ing ',
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')
                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_directory_digests = []
                classpath_product = self.context.products.get_data(
                    'rsc_mixed_compile_classpath')
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if self.HERMETIC == self.execution_strategy_enum.value and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of rsc".format(classpath_entry))
                    classpath_directory_digests.append(
                        classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest,
                             sources_snapshot.directory_digest) +
                            tuple(classpath_directory_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return ((EMPTY_DIRECTORY_DIGEST), classpath_abs_jdk)

                    (input_digest, classpath_entry_paths
                     ) = self.execution_strategy_enum.resolve_for_enum_variant(
                         {
                             self.HERMETIC: hermetic_digest_classpath,
                             self.SUBPROCESS: nonhermetic_digest_classpath,
                             self.NAILGUN: nonhermetic_digest_classpath,
                         })()

                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(classpath_entry_paths),
                        '-d',
                        rsc_jar_file_relative_path,
                    ] + self.get_options().extra_rsc_args + target_sources

                    self.write_argsfile(ctx, args)

                    self._runtool(distribution, input_digest, ctx)

                self._record_target_stats(tgt, len(classpath_entry_paths),
                                          len(target_sources), timer.elapsed,
                                          False, 'rsc')

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
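
Note: the hermetic/subprocess/nailgun selection above is a table dispatch: each execution strategy maps to a zero-argument function returning the (input_digest, classpath) pair, and only the selected one is invoked. A standalone illustration of the pattern (names hypothetical):

    def resolve_for_variant(strategy, handlers):
      # Look up the thunk registered for this strategy; calling it defers the (possibly
      # expensive) digest merging until a strategy has actually been chosen.
      return handlers[strategy]

    handlers = {
      'hermetic': lambda: ('merged-digest', ['rel/cp.jar']),
      'subprocess': lambda: (None, ['/abs/cp.jar']),
    }
    input_digest, classpath_entry_paths = resolve_for_variant('hermetic', handlers)()
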
Example #31
    def _compile_vts(
        self,
        vts,
        sources,
        analysis_file,
        upstream_analysis,
        classpath,
        outdir,
        log_file,
        progress_message,
        settings,
        fatal_warnings,
        counter,
    ):
        """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not sources:
            self.context.log.warn(
                "Skipping {} compile for targets with no sources:\n  {}".format(self.name(), vts.targets)
            )
        else:
            counter_val = str(counter()).rjust(counter.format_length(), b" ")
            counter_str = "[{}/{}] ".format(counter_val, counter.size)
            # Do some reporting.
            self.context.log.info(
                counter_str,
                "Compiling ",
                items_to_report_element(sources, "{} source".format(self.name())),
                " in ",
                items_to_report_element([t.address.reference() for t in vts.targets], "target"),
                " (",
                progress_message,
                ").",
            )
            with self.context.new_workunit("compile", labels=[WorkUnitLabel.COMPILER]):
                # The compiler may delete classfiles, then later exit on a compilation error. Then if the
                # change triggering the error is reverted, we won't rebuild to restore the missing
                # classfiles. So we force-invalidate here, to be on the safe side.
                vts.force_invalidate()
                self.compile(
                    self._args,
                    classpath,
                    sources,
                    outdir,
                    upstream_analysis,
                    analysis_file,
                    log_file,
                    settings,
                    fatal_warnings,
                )
Example #32
File: task.py Project: ejconlon/pants
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False, locally_changed_targets=None):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:                 The targets to check for changes.
    only_buildfiles:         If True, then only the target's BUILD files are checked for changes,
                             not its sources.
    invalidate_dependents:   If True then any targets depending on changed targets are invalidated.
    partition_size_hint:     Each VersionedTargetSet in the yielded list will represent targets
                             containing roughly this number of source files, if possible. Set to
                             sys.maxint for a single VersionedTargetSet. Set to 0 for one
                             VersionedTargetSet per target. It is up to the caller to do the right
                             thing with whatever partitioning it asks for.
    locally_changed_targets: Targets that we've edited locally. If specified, and there aren't too
                             many of them, we keep these in separate partitions from other targets,
                             as these are more likely to have build errors, and so to be rebuilt over
                             and over, and partitioning them separately is a performance win.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    # TODO(benjy): Compute locally_changed_targets here instead of passing it in? We currently pass
    # it in because JvmCompile already has the source->target mapping for other reasons, and also
    # to selectively enable this feature.
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = InvalidationCacheManager(self._cache_key_generator,
                                             self._build_invalidator_dir,
                                             invalidate_dependents,
                                             extra_data)

    # We separate locally-modified targets from others by coloring them differently.
    # This can be a performance win, because these targets are more likely to be iterated
    # over, and this preserves "chunk stability" for them.
    colors = {}

    # But we only do so if there aren't too many, or this optimization will backfire.
    locally_changed_target_limit = 10

    if locally_changed_targets and len(locally_changed_targets) < locally_changed_target_limit:
      for t in targets:
        if t in locally_changed_targets:
          colors[t] = 'locally_changed'
        else:
          colors[t] = 'not_locally_changed'
    invalidation_check = cache_manager.check(targets, partition_size_hint, colors)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint, colors)

    if not silent:
      targets = []
      payloads = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        payloads.extend(vt.cache_key.payloads)
      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if len(payloads) > 0:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(payloads, 'payload file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    for vt in invalidation_check.invalid_vts:
      vt.update()  # In case the caller doesn't update.
Example #33
    def work_for_vts_metacp(vts, ctx, classpath_product_key):
      metacp_dependencies_entries = self._zinc.compile_classpath_entries(
        classpath_product_key,
        ctx.target,
        extra_cp_entries=self._extra_compile_time_classpath)

      metacp_dependencies = fast_relpath_collection(c.path for c in metacp_dependencies_entries)


      metacp_dependencies_digests = [c.directory_digest for c in metacp_dependencies_entries
                                     if c.directory_digest]
      metacp_dependencies_paths_without_digests = fast_relpath_collection(
        c.path for c in metacp_dependencies_entries if not c.directory_digest)

      classpath_entries = [
        cp_entry for (conf, cp_entry) in
        self.context.products.get_data(classpath_product_key).get_classpath_entries_for_targets(
          [ctx.target])
      ]
      classpath_digests = [c.directory_digest for c in classpath_entries if c.directory_digest]
      classpath_paths_without_digests = fast_relpath_collection(
        c.path for c in classpath_entries if not c.directory_digest)

      classpath_abs = [c.path for c in classpath_entries]
      classpath_rel = fast_relpath_collection(classpath_abs)

      metacp_inputs = []
      metacp_inputs.extend(classpath_rel)

      counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(metacp_inputs, 'jar'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        ctx.target.address.spec,
        ').')

      ctx.ensure_output_dirs_exist()

      tgt, = vts.targets
      with Timer() as timer:
        # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
        args = [
          '--verbose',
          '--stub-broken-signatures',
          '--dependency-classpath', os.pathsep.join(
            metacp_dependencies +
            fast_relpath_collection(self._jvm_lib_jars_abs)
          ),
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(metacp_inputs),
        ]

        # NB: If we're building a scala library jar,
        #     also request that metacp generate the indices
        #     for the scala synthetics.
        if self._is_scala_core_library(tgt):
          args = [
            '--include-scala-library-synthetics',
          ] + args
        distribution = self._get_jvm_distribution()

        input_digest = self.context._scheduler.merge_directories(
          tuple(classpath_digests + metacp_dependencies_digests))

        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=tgt,
          input_digest=input_digest,
          input_files=tuple(classpath_paths_without_digests +
                            metacp_dependencies_paths_without_digests),
          output_dir=rsc_index_dir)
        metacp_result = json.loads(stdout_contents(metacp_wu))

        metai_classpath = self._collect_metai_classpath(metacp_result, classpath_rel)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

        abs_output = [(conf, os.path.join(get_buildroot(), x))
                      for conf in self._confs for x in metai_classpath]

        self._metacp_jars_classpath_product.add_for_target(
          ctx.target,
          abs_output,
        )

      self._record_target_stats(tgt,
          len(abs_output),
          len([]),
          timer.elapsed,
          False,
          'metacp'
        )
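
Note: the Timer used in these examples is a small context manager that exposes the elapsed wall-clock time once the block exits; a minimal sketch of the idea (not the Pants class):

    import time

    class Timer:
      def __enter__(self):
        self._start = time.time()
        return self

      def __exit__(self, *exc):
        self.elapsed = time.time() - self._start
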
Example #34
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets

      if not hit_cache:
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following
        # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
        #   and rsc compiles of rsc compatible targets.
        # - Run Rsc on the current target with those as dependencies.

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        rsc_deps_classpath_unprocessed = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(dependencies_for_target),
          collection_type=OrderedSet)

        rsc_classpath_rel = fast_relpath_collection(list(rsc_deps_classpath_unprocessed))

        ctx.ensure_output_dirs_exist()

        with Timer() as timer:
          # Outline Scala sources into SemanticDB / scalac compatible header jars.
          # ---------------------------------------------
          rsc_jar_file = fast_relpath(ctx.rsc_jar_file, get_buildroot())

          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)

          distribution = self._get_jvm_distribution()

          def hermetic_digest_classpath():
            jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(distribution)
            merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
              (jdk_libs_digest, sources_snapshot.directory_digest))
            classpath_rel_jdk = rsc_classpath_rel + jdk_libs_rel
            return (merged_sources_and_jdk_digest, classpath_rel_jdk)
          def nonhermetic_digest_classpath():
            classpath_abs_jdk = rsc_classpath_rel + self._jdk_libs_abs(distribution)
            return ((EMPTY_DIRECTORY_DIGEST), classpath_abs_jdk)

          (input_digest, classpath_entry_paths) = self.execution_strategy_enum.resolve_for_enum_variant({
            self.HERMETIC: hermetic_digest_classpath,
            self.SUBPROCESS: nonhermetic_digest_classpath,
            self.NAILGUN: nonhermetic_digest_classpath,
          })()

          target_sources = ctx.sources
          args = [
                   '-cp', os.pathsep.join(classpath_entry_paths),
                   '-d', rsc_jar_file,
                 ] + target_sources

          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_classpath_rel),
            input_digest=input_digest,
            output_dir=os.path.dirname(rsc_jar_file))

        self._record_target_stats(tgt,
          len(rsc_classpath_rel),
          len(target_sources),
          timer.elapsed,
          False,
          'rsc'
        )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
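
Note: collecting the dependency classpath into an OrderedSet rather than a plain set deduplicates entries while keeping their order stable, which keeps the compiler invocation reproducible. A tiny illustration of the equivalent behavior in plain Python:

    from collections import OrderedDict

    def ordered_dedupe(paths):
      # First occurrence wins and order is preserved, like OrderedSet for this purpose.
      return list(OrderedDict.fromkeys(paths))

    ordered_dedupe(['b.jar', 'a.jar', 'b.jar'])  # -> ['b.jar', 'a.jar']
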
Example #35
File: task.py Project: wonlay/pants
  def invalidated(self,
                  targets,
                  invalidate_dependents=False,
                  silent=False,
                  fingerprint_strategy=None,
                  topological_order=False):
    """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If true, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per-task,
                                finer-grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

    cache_key_generator = CacheKeyGenerator(
      self.context.options.for_global_scope().cache_key_gen_version,
      self.fingerprint)
    cache_manager = InvalidationCacheManager(self.workdir,
                                             cache_key_generator,
                                             self._build_invalidator_dir,
                                             invalidate_dependents,
                                             fingerprint_strategy=fingerprint_strategy,
                                             invalidation_report=self.context.invalidation_report,
                                             task_name=type(self).__name__,
                                             task_version=self.implementation_version_str(),
                                             artifact_write_callback=self.maybe_write_artifact)

    invalidation_check = cache_manager.check(targets, topological_order=topological_order)

    self._maybe_create_results_dirs(invalidation_check.all_vts)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts, uncached_causes = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        self.context.run_tracker.artifact_cache_stats.add_hits(cache_manager.task_name,
                                                               cached_targets)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        self.context.run_tracker.artifact_cache_stats.add_misses(cache_manager.task_name,
                                                                 uncached_targets,
                                                                 uncached_causes)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts)

    if not silent:
      targets = []
      for vt in invalidation_check.invalid_vts:
        targets.extend(vt.targets)

      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target'),
                        '.']
        self.context.log.info(*msg_elements)

    invalidation_report = self.context.invalidation_report
    if invalidation_report:
      for vts in invalidation_check.all_vts:
        invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid,
                                    phase='pre-check')

    # Cache has been checked to create the full list of invalid VTs.
    # Only copy previous_results for this subset of VTs.
    for vts in invalidation_check.invalid_vts:
      if self.incremental:
        vts.copy_previous_results(self.workdir)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check

    if invalidation_report:
      for vts in invalidation_check.all_vts:
        invalidation_report.add_vts(cache_manager, vts.targets, vts.cache_key, vts.valid,
                                    phase='post-check')

    for vt in invalidation_check.invalid_vts:
      vt.update()

    # Background work to clean up previous builds.
    if self.context.options.for_global_scope().workdir_max_build_entries is not None:
      self._launch_background_workdir_cleanup(invalidation_check.all_vts)
Example #36
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            (tgt, ) = vts.targets

            rsc_cc = compile_contexts[target].rsc_cc

            use_youtline = rsc_cc.workflow == self.JvmCompileWorkflowType.outline_and_zinc
            outliner = "scalac-outliner" if use_youtline else "rsc"

            if use_youtline and Semver.parse(
                    self._scala_library_version) < Semver.parse("2.12.9"):
                raise RuntimeError(
                    f"To use scalac's built-in outlining, scala version must be at least 2.12.9, but got {self._scala_library_version}"
                )

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   " ")
                counter_str = f"[{counter_val}/{counter.size}] "
                action_str = "Outlining " if use_youtline else "Rsc-ing "

                self.context.log.info(
                    counter_str,
                    action_str,
                    items_to_report_element(ctx.sources,
                                            f"{self.name()} source"),
                    " in ",
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        "target"),
                    " (",
                    ctx.target.address.spec,
                    ").",
                )
                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_digests = []
                classpath_product = self.context.products.get_data(
                    "rsc_mixed_compile_classpath")
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)

                hermetic = self.execution_strategy == self.ExecutionStrategy.hermetic
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if hermetic and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of {}".format(classpath_entry,
                                                     outliner))
                    classpath_digests.append(classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest, sources_snapshot.digest) +
                            tuple(classpath_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return ((EMPTY_DIGEST), classpath_abs_jdk)

                    (input_digest, classpath_entry_paths) = match(
                        self.execution_strategy,
                        {
                            self.ExecutionStrategy.hermetic:
                            hermetic_digest_classpath,
                            self.ExecutionStrategy.subprocess:
                            nonhermetic_digest_classpath,
                            self.ExecutionStrategy.nailgun:
                            nonhermetic_digest_classpath,
                        },
                    )()

                    youtline_args = []
                    if use_youtline:
                        youtline_args = [
                            "-Youtline",
                            "-Ystop-after:pickler",
                            "-Ypickle-write",
                            rsc_jar_file_relative_path,
                        ]

                    target_sources = ctx.sources

                    # TODO: m.jar digests aren't found, so hermetic will fail.
                    if use_youtline and not hermetic and self.get_options().zinc_outline:
                        self._zinc_outline(ctx, classpath_paths,
                                           target_sources, youtline_args)
                    else:
                        args = ([
                            "-cp",
                            os.pathsep.join(classpath_entry_paths),
                            "-d",
                            rsc_jar_file_relative_path,
                        ] + self.get_options().extra_rsc_args + youtline_args +
                                target_sources)

                        self.write_argsfile(ctx, args)

                        self._runtool(distribution, input_digest, ctx,
                                      use_youtline)

                self._record_target_stats(
                    tgt,
                    len(classpath_entry_paths),
                    len(target_sources),
                    timer.elapsed,
                    False,
                    outliner,
                )

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
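
Note: when the outline_and_zinc workflow is selected, this job uses scalac's built-in outlining (available from Scala 2.12.9, matching the version check above) instead of Rsc: the extra flags stop compilation after the pickler phase and write the outline jar. Roughly, the assembled argument vector looks like this (paths illustrative):

    args = [
      '-cp', 'dep-a.jar:dep-b.jar',
      '-d', 'compile/rsc/current/m.jar',
      '-Youtline', '-Ystop-after:pickler', '-Ypickle-write', 'compile/rsc/current/m.jar',
      'src/scala/Foo.scala',
    ]
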
Example #37
  def invalidated(self,
                  targets,
                  invalidate_dependents=False,
                  silent=False,
                  fingerprint_strategy=None,
                  topological_order=False):
    """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If true, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per-task,
                                finer-grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """
    invalidation_check = self._do_invalidation_check(fingerprint_strategy,
                                                     invalidate_dependents,
                                                     targets,
                                                     topological_order)

    self._maybe_create_results_dirs(invalidation_check.all_vts)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts, uncached_causes = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        self.context.run_tracker.artifact_cache_stats.add_hits(self._task_name, cached_targets)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        self.context.run_tracker.artifact_cache_stats.add_misses(self._task_name,
                                                                 uncached_targets,
                                                                 uncached_causes)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = InvalidationCheck(invalidation_check.all_vts, uncached_vts)

    if not silent:
      targets = []
      for vt in invalidation_check.invalid_vts:
        targets.extend(vt.targets)

      if len(targets):
        target_address_references = [t.address.reference() for t in targets]
        msg_elements = [
          'Invalidated ',
          items_to_report_element(target_address_references, 'target'),
          '.',
        ]
        self.context.log.info(*msg_elements)

    self._update_invalidation_report(invalidation_check, 'pre-check')

    # Cache has been checked to create the full list of invalid VTs.
    # Only copy previous_results for this subset of VTs.
    if self.incremental:
      for vts in invalidation_check.invalid_vts:
        vts.copy_previous_results()

    # This may seem odd: why would we need to invalidate a VersionedTargetSet that is already
    # invalid?  But the name force_invalidate() is slightly misleading in this context - what it
    # actually does is delete the key file created at the end of the last successful task run.
    # This is necessary to avoid the following scenario:
    #
    # 1) In state A: Task succeeds and writes some output.  Key is recorded by the invalidator.
    # 2) In state B: Task fails, but writes some output.  Key is not recorded.
    # 3) After reverting back to state A: The current key is the same as the one recorded at the
    #    end of step 1), so it looks like no work needs to be done, but actually the task
    #   must re-run, to overwrite the output written in step 2.
    #
    # Deleting the file ensures that if a task fails, there is no key for which we might think
    # we're in a valid state.
    for vts in invalidation_check.invalid_vts:
      vts.force_invalidate()

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check

    self._update_invalidation_report(invalidation_check, 'post-check')

    for vt in invalidation_check.invalid_vts:
      vt.update()

    # Background work to clean up previous builds.
    if self.context.options.for_global_scope().workdir_max_build_entries is not None:
      self._launch_background_workdir_cleanup(invalidation_check.all_vts)
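
For reference, subclasses drive invalidated() as a context manager from their execute() method: the work happens inside the with block and, on a clean exit, the invalid targets are marked up to date. A minimal, hypothetical caller sketch (the per-target helper _build_one is a placeholder, not part of the example above):

def execute(self):
  # Hypothetical consumer of invalidated(); targets come from the task's context.
  targets = self.context.targets()
  with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
    for vt in invalidation_check.invalid_vts:
      # Only stale targets are rebuilt; valid ones are skipped entirely.
      self._build_one(vt.target, vt.results_dir)  # placeholder for the real per-target work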
Example #38
File: task.py Project: qiaohaijun/pants
    def invalidated(
        self,
        targets,
        invalidate_dependents=False,
        partition_size_hint=sys.maxint,
        silent=False,
        locally_changed_targets=None,
        fingerprint_strategy=None,
        topological_order=False,
    ):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :param targets:               The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    :param partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                                  containing roughly this number of source files, if possible. Set to
                                  sys.maxint for a single VersionedTargetSet. Set to 0 for one
                                  VersionedTargetSet per target. It is up to the caller to do the right
                                  thing with whatever partitioning it asks for.
    :param locally_changed_targets: Targets that we've edited locally. If specified, and there aren't too
                                  many of them, we keep these in separate partitions from other targets,
                                  as these are more likely to have build errors, and so to be rebuilt over
                                  and over, and partitioning them separately is a performance win.
    :param fingerprint_strategy:   A FingerprintStrategy instance, which can do per task, finer grained
                                  fingerprinting of a given Target.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the (partitioned) targets.
    :rtype: InvalidationCheck
    """

        # TODO(benjy): Compute locally_changed_targets here instead of passing it in? We currently pass
        # it in because JvmCompile already has the source->target mapping for other reasons, and also
        # to selectively enable this feature.
        cache_manager = self.create_cache_manager(invalidate_dependents, fingerprint_strategy=fingerprint_strategy)

        # We separate locally-modified targets from others by coloring them differently.
        # This can be a performance win, because these targets are more likely to be iterated
        # over, and this preserves "chunk stability" for them.
        colors = {}

        # But we only do so if there aren't too many, or this optimization will backfire.
        locally_changed_target_limit = 10

        if locally_changed_targets and len(locally_changed_targets) < locally_changed_target_limit:
            for t in targets:
                if t in locally_changed_targets:
                    colors[t] = "locally_changed"
                else:
                    colors[t] = "not_locally_changed"
        invalidation_check = cache_manager.check(
            targets, partition_size_hint, colors, topological_order=topological_order
        )

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit("cache"):
                cached_vts, uncached_vts = self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                for t in cached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_hit("default", t)
                if not silent:
                    self._report_targets("Using cached artifacts for ", cached_targets, ".")
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                for t in uncached_targets:
                    self.context.run_tracker.artifact_cache_stats.add_miss("default", t)
                if not silent:
                    self._report_targets("No cached artifacts for ", uncached_targets, ".")
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = InvalidationCheck(
                invalidation_check.all_vts, uncached_vts, partition_size_hint, colors
            )

        if self.cache_target_dirs:
            for vt in invalidation_check.all_vts:
                vt.create_results_dir(os.path.join(self.workdir, vt.cache_key.hash))

        if not silent:
            targets = []
            num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
            for vt in invalidation_check.invalid_vts_partitioned:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    "Invalidated ",
                    items_to_report_element([t.address.reference() for t in targets], "target"),
                ]
                if num_invalid_partitions > 1:
                    msg_elements.append(" in {} target partitions".format(num_invalid_partitions))
                msg_elements.append(".")
                self.context.log.info(*msg_elements)

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        for vt in invalidation_check.invalid_vts:
            vt.update()  # In case the caller doesn't update.

        write_to_cache = (
            self.cache_target_dirs and self.artifact_cache_writes_enabled() and invalidation_check.invalid_vts
        )
        if write_to_cache:

            def result_files(vt):
                return [os.path.join(vt.results_dir, f) for f in os.listdir(vt.results_dir)]

            pairs = [(vt, result_files(vt)) for vt in invalidation_check.invalid_vts]
            self.update_artifact_cache(pairs)
Example #39
    def invalidated(self,
                    targets,
                    invalidate_dependents=False,
                    silent=False,
                    fingerprint_strategy=None,
                    topological_order=False):
        """Checks targets for invalidation, first checking the artifact cache.

    Subclasses call this to figure out what to work on.

    :API: public

    :param targets: The targets to check for changes.
    :param invalidate_dependents: If True then any targets depending on changed targets are
                                  invalidated.
    :param silent: If true, suppress logging information about target invalidation.
    :param fingerprint_strategy: A FingerprintStrategy instance, which can do per task,
                                finer grained fingerprinting of a given Target.
    :param topological_order: Whether to invalidate in dependency order.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.

    :returns: Yields an InvalidationCheck object reflecting the targets.
    :rtype: InvalidationCheck
    """

        cache_key_generator = CacheKeyGenerator(
            self.context.options.for_global_scope().cache_key_gen_version,
            self.fingerprint)
        cache_manager = InvalidationCacheManager(
            self.workdir,
            cache_key_generator,
            self._build_invalidator_dir,
            invalidate_dependents,
            fingerprint_strategy=fingerprint_strategy,
            invalidation_report=self.context.invalidation_report,
            task_name=type(self).__name__,
            task_version=self.implementation_version_str(),
            artifact_write_callback=self.maybe_write_artifact)

        invalidation_check = cache_manager.check(
            targets, topological_order=topological_order)

        self._maybe_create_results_dirs(invalidation_check.all_vts)

        if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
            with self.context.new_workunit('cache'):
                cached_vts, uncached_vts, uncached_causes = \
                  self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
            if cached_vts:
                cached_targets = [vt.target for vt in cached_vts]
                self.context.run_tracker.artifact_cache_stats.add_hits(
                    cache_manager.task_name, cached_targets)
                if not silent:
                    self._report_targets('Using cached artifacts for ',
                                         cached_targets, '.')
            if uncached_vts:
                uncached_targets = [vt.target for vt in uncached_vts]
                self.context.run_tracker.artifact_cache_stats.add_misses(
                    cache_manager.task_name, uncached_targets, uncached_causes)
                if not silent:
                    self._report_targets('No cached artifacts for ',
                                         uncached_targets, '.')
            # Now that we've checked the cache, re-partition whatever is still invalid.
            invalidation_check = \
              InvalidationCheck(invalidation_check.all_vts, uncached_vts)

        if not silent:
            targets = []
            for vt in invalidation_check.invalid_vts:
                targets.extend(vt.targets)

            if len(targets):
                msg_elements = [
                    'Invalidated ',
                    items_to_report_element(
                        [t.address.reference() for t in targets], 'target'),
                    '.'
                ]
                self.context.log.info(*msg_elements)

        invalidation_report = self.context.invalidation_report
        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='pre-check')

        # Cache has been checked to create the full list of invalid VTs.
        # Only copy previous_results for this subset of VTs.
        if self.incremental:
            for vts in invalidation_check.invalid_vts:
                vts.copy_previous_results()

        # Yield the result, and then mark the targets as up to date.
        yield invalidation_check

        if invalidation_report:
            for vts in invalidation_check.all_vts:
                invalidation_report.add_vts(cache_manager,
                                            vts.targets,
                                            vts.cache_key,
                                            vts.valid,
                                            phase='post-check')

        for vt in invalidation_check.invalid_vts:
            vt.update()

        # Background work to clean up previous builds.
        if self.context.options.for_global_scope().workdir_max_build_entries is not None:
            self._launch_background_workdir_cleanup(invalidation_check.all_vts)
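
The fingerprint_strategy argument above lets a task narrow what feeds into a target's cache key. A hedged sketch of a custom strategy, assuming the FingerprintStrategy base class referenced in the docstrings exposes compute_fingerprint(target) and requires hash/eq (the import path is an assumption):

import hashlib

from pants.base.fingerprint_strategy import FingerprintStrategy  # assumed import path

class SourcesOnlyFingerprintStrategy(FingerprintStrategy):
  """Illustrative only: fingerprint just the set of declared source paths."""

  def compute_fingerprint(self, target):
    hasher = hashlib.sha1()
    for source in sorted(target.sources_relative_to_buildroot()):
      hasher.update(source.encode('utf-8'))
    return hasher.hexdigest()

  def __hash__(self):
    return hash(type(self).__name__)

  def __eq__(self, other):
    return type(self) == type(other)

It would then be passed straight through, e.g. self.invalidated(targets, fingerprint_strategy=SourcesOnlyFingerprintStrategy()).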
Example #40
        def work_for_vts_rsc_jar_library(vts, ctx):
            distribution = self._get_jvm_distribution()

            # TODO use compile_classpath
            classpath_abs = [
                path for (conf, path) in self.context.products.get_data(
                    'rsc_classpath').get_for_target(ctx.target)
            ]

            dependency_classpath = self._zinc.compile_classpath(
                'compile_classpath',
                ctx.target,
                extra_cp_entries=self._extra_compile_time_classpath)
            dependency_classpath = fast_relpath_collection(
                dependency_classpath)

            classpath_rel = fast_relpath_collection(classpath_abs)

            cp_entries = []
            cp_entries.extend(classpath_rel)

            counter_val = str(counter()).rjust(counter.format_length(), b' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            self.context.log.info(
                counter_str, 'Metacp-ing ',
                items_to_report_element(cp_entries, 'jar'), ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', ctx.target.address.spec, ').')

            ctx.ensure_output_dirs_exist()

            tgt, = vts.targets
            with Timer() as timer:
                # Step 1: Convert classpath to SemanticDB
                # ---------------------------------------
                scalac_classpath_path_entries_abs = self.tool_classpath(
                    'workaround-metacp-dependency-classpath')
                scalac_classpath_path_entries = fast_relpath_collection(
                    scalac_classpath_path_entries_abs)
                rsc_index_dir = fast_relpath(ctx.rsc_index_dir,
                                             get_buildroot())
                args = [
                    '--verbose',
                    # NB: We need to add these extra dependencies in order to be able
                    #     to find symbols used by the scalac jars.
                    '--dependency-classpath',
                    os.pathsep.join(
                        dependency_classpath + scalac_classpath_path_entries +
                        fast_relpath_collection(self._jvm_lib_jars_abs)),
                    # NB: The directory to dump the semanticdb jars generated by metacp.
                    '--out',
                    rsc_index_dir,
                    os.pathsep.join(cp_entries),
                ]

                # NB: If we're building a scala library jar,
                #     also request that metacp generate the indices
                #     for the scala synthetics.
                if self._is_scala_core_library(tgt):
                    args = [
                        '--include-scala-library-synthetics',
                    ] + args
                metacp_wu = self._runtool(
                    'scala.meta.cli.Metacp',
                    'metacp',
                    args,
                    distribution,
                    tgt=tgt,
                    input_files=tuple(dependency_classpath +
                                      scalac_classpath_path_entries +
                                      classpath_rel),
                    output_dir=rsc_index_dir)
                metacp_stdout = stdout_contents(metacp_wu)
                metacp_result = json.loads(metacp_stdout)

                metai_classpath = self._collect_metai_classpath(
                    metacp_result, classpath_rel)

                # Step 1.5: metai Index the semanticdbs
                # -------------------------------------
                self._run_metai_tool(distribution, metai_classpath,
                                     rsc_index_dir, tgt)

                abs_output = [(conf, os.path.join(get_buildroot(), x))
                              for conf in self._confs for x in metai_classpath]

                self._metacp_jars_classpath_product.add_for_target(
                    ctx.target,
                    abs_output,
                )

            self._record_target_stats(tgt, len(abs_output), len([]),
                                      timer.elapsed, False, 'metacp')
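
fast_relpath_collection appears throughout these rsc/metacp examples to turn absolute classpath entries into buildroot-relative ones before they are handed to the JVM tools. A minimal sketch of the assumed behaviour (simply mapping fast_relpath over the inputs; the real helper may differ):

def fast_relpath_collection_sketch(collection):
  # Assumed behaviour only: relativize each absolute path against the buildroot.
  buildroot = get_buildroot()
  return [fast_relpath(path, buildroot) for path in collection]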
Example #41
    def _register_vts(self, compile_contexts):
        classes_by_source = self.context.products.get_data('classes_by_source')
        classes_by_target = self.context.products.get_data('classes_by_target')
        compile_classpath = self.context.products.get_data('compile_classpath')
        resources_by_target = self.context.products.get_data(
            'resources_by_target')
        product_deps_by_src = self.context.products.get_data(
            'product_deps_by_src')

        # Register class products (and resources generated by annotation processors.)
        computed_classes_by_source_by_context = self._strategy.compute_classes_by_source(
            compile_contexts)
        resource_mapping = self._strategy.compute_resource_mapping(
            compile_contexts)
        for compile_context in compile_contexts:
            computed_classes_by_source = computed_classes_by_source_by_context[
                compile_context]
            target = compile_context.target
            classes_dir = compile_context.classes_dir

            def add_products_by_target(files):
                for f in files:
                    clsname = self._strategy.class_name_for_class_file(
                        compile_context, f)
                    if clsname:
                        # Is a class.
                        classes_by_target[target].add_abs_paths(
                            classes_dir, [f])
                        resources = resource_mapping.get(clsname, [])
                        resources_by_target[target].add_abs_paths(
                            classes_dir, resources)
                    else:
                        # Is a resource.
                        resources_by_target[target].add_abs_paths(
                            classes_dir, [f])

            # Collect classfiles (absolute) that were claimed by sources (relative)
            for source in compile_context.sources:
                classes = computed_classes_by_source.get(source, [])
                add_products_by_target(classes)
                if classes_by_source is not None:
                    classes_by_source[source].add_abs_paths(
                        classes_dir, classes)

            # And any that were not claimed by sources (NB: `None` map key.)
            unclaimed_classes = computed_classes_by_source.get(None, [])
            if unclaimed_classes:
                self.context.log.debug(
                    items_to_report_element(unclaimed_classes, 'class'),
                    ' not claimed by analysis for ',
                    str(compile_context.target))
                add_products_by_target(unclaimed_classes)

        for compile_context in compile_contexts:
            # Register resource products.
            extra_resources = self.extra_products(compile_context.target)
            # Add to resources_by_target (if it was requested).
            if resources_by_target is not None:
                target_resources = resources_by_target[compile_context.target]
                for root, abs_paths in extra_resources:
                    target_resources.add_abs_paths(root, abs_paths)
            # And to the compile_classpath, to make them available within the next round.
            # TODO(stuhood): This is redundant with resources_by_target, but resources_by_target
            # are not available during compilation. https://github.com/pantsbuild/pants/issues/206
            entries = [(conf, root) for conf in self._confs
                       for root, _ in extra_resources]
            compile_classpath.add_for_target(compile_context.target, entries)

            if self.context.products.is_required_data('product_deps_by_src'):
                product_deps_by_src[
                    compile_context.target] = self._strategy.parse_deps(
                        compile_context.analysis_file)
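
_register_vts above unpacks extra_products(target) as (root, abs_paths) pairs. A hedged sketch of an override that publishes extra generated resources (the directory layout under the task workdir is illustrative):

def extra_products(self, target):
  # Hypothetical override: expose files generated alongside compilation.
  # Each entry is (root, [absolute paths under that root]), matching the
  # unpacking in _register_vts.
  gen_dir = os.path.join(self.workdir, target.id, 'generated-resources')  # illustrative path
  if not os.path.isdir(gen_dir):
    return []
  abs_paths = [os.path.join(gen_dir, f) for f in os.listdir(gen_dir)]
  return [(gen_dir, abs_paths)]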
Example #42
    def _compile_vts(self, vts, target, sources, analysis_file,
                     upstream_analysis, classpath, outdir, log_dir,
                     zinc_args_file, progress_message, settings,
                     fatal_warnings, zinc_file_manager, counter):
        """Compiles sources for the given vts into the given output dir.

    vts - versioned target set
    sources - sources for this target set
    analysis_file - the analysis file to manipulate
    classpath - a list of classpath entries
    outdir - the output dir to send classes to

    May be invoked concurrently on independent target sets.

    Postcondition: The individual targets in vts are up-to-date, as if each were
                   compiled individually.
    """
        if not sources:
            self.context.log.warn(
                'Skipping {} compile for targets with no sources:\n  {}'.
                format(self.name(), vts.targets))
        else:
            counter_val = str(counter()).rjust(counter.format_length(), b' ')
            counter_str = '[{}/{}] '.format(counter_val, counter.size)
            # Do some reporting.
            self.context.log.info(
                counter_str, 'Compiling ',
                items_to_report_element(sources,
                                        '{} source'.format(self.name())),
                ' in ',
                items_to_report_element(
                    [t.address.reference() for t in vts.targets], 'target'),
                ' (', progress_message, ').')
            with self.context.new_workunit('compile',
                                           labels=[WorkUnitLabel.COMPILER
                                                   ]) as compile_workunit:
                if self.get_options().capture_classpath:
                    self._record_compile_classpath(classpath, vts.targets,
                                                   outdir)

                try:
                    self.compile(
                        self._args,
                        classpath,
                        sources,
                        outdir,
                        upstream_analysis,
                        analysis_file,
                        zinc_args_file,
                        settings,
                        fatal_warnings,
                        zinc_file_manager,
                        self._get_plugin_map(
                            'javac',
                            self._zinc.javac_compiler_plugins_src(self),
                            target),
                        self._get_plugin_map(
                            'scalac',
                            self._zinc.scalac_compiler_plugins_src(self),
                            target),
                    )
                    self._capture_logs(compile_workunit, log_dir)
                except TaskError:
                    if self.get_options().suggest_missing_deps:
                        logs = [
                            path for _, name, _, path in self._find_logs(
                                compile_workunit) if name == self.name()
                        ]
                        if logs:
                            self._find_missing_deps(logs, target)
                    raise
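
The counter argument to _compile_vts is callable and also exposes size and format_length(), which together produce the '[ 3/12] ' progress prefix seen in the log calls. A hypothetical stand-in with the same interface:

class CompileCounter(object):
  """Hypothetical progress counter matching the usage above."""

  def __init__(self, size):
    self.size = size      # total number of compile units
    self._count = 0

  def __call__(self):
    self._count += 1      # each call claims the next slot
    return self._count

  def format_length(self):
    return len(str(self.size))  # width used to right-justify the current count

counter = CompileCounter(12)
counter_str = '[{}/{}] '.format(str(counter()).rjust(counter.format_length(), ' '), counter.size)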
Example #43
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target

      if not hit_cache:
        cp_entries = []

        # Include the current machine's jdk lib jars. This'll blow up remotely.
        # We need a solution for that.
        # Probably something to do with https://github.com/pantsbuild/pants/pull/6346
        distribution = JvmPlatform.preferred_jvm_distribution([ctx.target.platform], strict=True)
        jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
        cp_entries.extend(jvm_lib_jars_abs)

        classpath_abs = self._zinc.compile_classpath(
          'rsc_classpath',
          ctx.target,
          extra_cp_entries=self._extra_compile_time_classpath)

        jar_deps = [t for t in DependencyContext.global_instance().dependencies_respecting_strict_deps(target)
                    if isinstance(t, JarLibrary)]
        metacp_jar_classpath_abs = [y[1] for y in self._metacp_jars_classpath_product.get_for_targets(
          jar_deps
        )]
        jar_jar_paths = {y[1] for y in self.context.products.get_data('rsc_classpath').get_for_targets(jar_deps)}

        classpath_abs = [c for c in classpath_abs if c not in jar_jar_paths]


        classpath_rel = fast_relpath_collection(classpath_abs)
        metacp_jar_classpath_rel = fast_relpath_collection(metacp_jar_classpath_abs)
        cp_entries.extend(classpath_rel)

        ctx.ensure_output_dirs_exist()

        counter_val = str(counter()).rjust(counter.format_length(), b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        tgt, = vts.targets
        with Timer() as timer:
          # Step 1: Convert classpath to SemanticDB
          # ---------------------------------------
          scalac_classpath_path_entries_abs = self.tool_classpath('workaround-metacp-dependency-classpath')
          scalac_classpath_path_entries = fast_relpath_collection(scalac_classpath_path_entries_abs)
          rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
          args = [
            '--verbose',
            # NB: Without this setting, rsc will be missing some symbols
            #     from the scala library.
            '--include-scala-library-synthetics', # TODO generate these once and cache them
            # NB: We need to add these extra dependencies in order to be able
            #     to find symbols used by the scalac jars.
            '--dependency-classpath', os.pathsep.join(scalac_classpath_path_entries + list(jar_jar_paths)),
            # NB: The directory to dump the semanticdb jars generated by metacp.
            '--out', rsc_index_dir,
            os.pathsep.join(cp_entries),
          ]
          metacp_wu = self._runtool(
            'scala.meta.cli.Metacp',
            'metacp',
            args,
            distribution,
            tgt=tgt,
            input_files=(scalac_classpath_path_entries + classpath_rel),
            output_dir=rsc_index_dir)
          metacp_stdout = stdout_contents(metacp_wu)
          metacp_result = json.loads(metacp_stdout)


          metai_classpath = self._collect_metai_classpath(
            metacp_result, classpath_rel, jvm_lib_jars_abs)

          # Step 1.5: metai Index the semanticdbs
          # -------------------------------------
          self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

          # Step 2: Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_outline_dir = fast_relpath(ctx.rsc_outline_dir, get_buildroot())
          rsc_out = os.path.join(rsc_outline_dir, 'META-INF/semanticdb/out.semanticdb')
          safe_mkdir(os.path.join(rsc_outline_dir, 'META-INF/semanticdb'))
          target_sources = ctx.sources
          args = [
            '-cp', os.pathsep.join(metai_classpath + metacp_jar_classpath_rel),
            '-out', rsc_out,
          ] + target_sources
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            # TODO pass the input files from the target snapshot instead of the below
            # input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
            input_files=target_sources + metai_classpath + metacp_jar_classpath_rel,
            output_dir=rsc_outline_dir)
          rsc_classpath = [rsc_outline_dir]

          # Step 2.5: Postprocess the rsc outputs
          # TODO: This is only necessary as a workaround for https://github.com/twitter/rsc/issues/199.
          # Ideally, Rsc would do this on its own.
          self._run_metai_tool(distribution,
            rsc_classpath,
            rsc_outline_dir,
            tgt,
            extra_input_files=(rsc_out,))


          # Step 3: Convert SemanticDB into an mjar
          # ---------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())
          args = [
            '-out', rsc_mjar_file,
            os.pathsep.join(rsc_classpath),
          ]
          self._runtool(
            'scala.meta.cli.Mjar',
            'mjar',
            args,
            distribution,
            tgt=tgt,
            input_files=(
              rsc_out,
            ),
            output_dir=os.path.dirname(rsc_mjar_file)
            )
          self.context.products.get_data('rsc_classpath').add_for_target(
            ctx.target,
            [(conf, ctx.rsc_mjar_file) for conf in self._confs],
          )

        self._record_target_stats(tgt,
                                  len(cp_entries),
                                  len(target_sources),
                                  timer.elapsed,
                                  False,
                                  'rsc'
                                  )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
Example #44
 def _report_targets(self, prefix, targets, suffix):
     self.context.log.info(
         prefix,
         items_to_report_element([t.address.reference() for t in targets],
                                 'target'), suffix)
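
Every example on this page routes target or source lists through items_to_report_element before logging. A plausible, simplified stand-in (the real Pants helper builds a richer report element whose detail can be expanded in the reporting UI; this sketch only reproduces the visible '14 targets' style summary):

def items_to_report_element_sketch(items, item_type):
  # Simplified stand-in: summarize a list as '1 target' / '14 targets',
  # keeping the full listing available as detail text.
  items = list(items)
  plural = item_type if len(items) == 1 else item_type + 's'
  summary = '{} {}'.format(len(items), plural)
  detail = '\n'.join(str(item) for item in items)
  return (summary, detail)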
Example #45
File: task.py Project: godwinpinto/pants
  def invalidated(self,
                  targets,
                  invalidate_dependents=False,
                  partition_size_hint=sys.maxint,
                  silent=False,
                  locally_changed_targets=None,
                  fingerprint_strategy=None,
                  topological_order=False):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:                 The targets to check for changes.
    invalidate_dependents:   If True then any targets depending on changed targets are invalidated.
    partition_size_hint:     Each VersionedTargetSet in the yielded list will represent targets
                             containing roughly this number of source files, if possible. Set to
                             sys.maxint for a single VersionedTargetSet. Set to 0 for one
                             VersionedTargetSet per target. It is up to the caller to do the right
                             thing with whatever partitioning it asks for.
    locally_changed_targets: Targets that we've edited locally. If specified, and there aren't too
                             many of them, we keep these in separate partitions from other targets,
                             as these are more likely to have build errors, and so to be rebuilt over
                             and over, and partitioning them separately is a performance win.
    fingerprint_strategy:    A FingerprintStrategy instance, which can do per task, finer grained
                             fingerprinting of a given Target.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    # TODO(benjy): Compute locally_changed_targets here instead of passing it in? We currently pass
    # it in because JvmCompile already has the source->target mapping for other reasons, and also
    # to selectively enable this feature.

    cache_manager = self.create_cache_manager(invalidate_dependents,
                                              fingerprint_strategy=fingerprint_strategy)

    # We separate locally-modified targets from others by coloring them differently.
    # This can be a performance win, because these targets are more likely to be iterated
    # over, and this preserves "chunk stability" for them.
    colors = {}

    # But we only do so if there aren't too many, or this optimization will backfire.
    locally_changed_target_limit = 10

    if locally_changed_targets and len(locally_changed_targets) < locally_changed_target_limit:
      for t in targets:
        if t in locally_changed_targets:
          colors[t] = 'locally_changed'
        else:
          colors[t] = 'not_locally_changed'
    invalidation_check = cache_manager.check(targets, partition_size_hint, colors, topological_order=topological_order)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint, colors)

    if not silent:
      targets = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)

      payloads = [t.payload for t in targets]

      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if len(payloads) > 0:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(payloads, 'payload file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    for vt in invalidation_check.invalid_vts:
      vt.update()  # In case the caller doesn't update.
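
With the partitioned variants above, callers usually iterate invalid_vts_partitioned so that work is batched per partition rather than per target. A hedged caller sketch (the per-partition helper is a placeholder):

def execute(self):
  targets = self.context.targets()
  with self.invalidated(targets,
                        invalidate_dependents=True,
                        partition_size_hint=200) as invalidation_check:
    for vts in invalidation_check.invalid_vts_partitioned:
      # Each vts groups roughly partition_size_hint source files' worth of targets.
      self._compile_partition(vts)  # placeholder for the per-partition work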
Example #46
File: task.py Project: luciferous/pants
  def invalidated(self, targets, only_buildfiles=False, invalidate_dependents=False,
                  partition_size_hint=sys.maxint, silent=False):
    """Checks targets for invalidation, first checking the artifact cache.
    Subclasses call this to figure out what to work on.

    targets:               The targets to check for changes.
    only_buildfiles:       If True, then only the target's BUILD files are checked for changes, not
                           its sources.
    invalidate_dependents: If True then any targets depending on changed targets are invalidated.
    partition_size_hint:   Each VersionedTargetSet in the yielded list will represent targets
                           containing roughly this number of source files, if possible. Set to
                           sys.maxint for a single VersionedTargetSet. Set to 0 for one
                           VersionedTargetSet per target. It is up to the caller to do the right
                           thing with whatever partitioning it asks for.

    Yields an InvalidationCheck object reflecting the (partitioned) targets.

    If no exceptions are thrown by work in the block, the build cache is updated for the targets.
    Note: the artifact cache is not updated. That must be done manually.
    """
    extra_data = [self.invalidate_for()]

    for f in self.invalidate_for_files():
      extra_data.append(hash_file(f))

    cache_manager = CacheManager(self._cache_key_generator,
                                 self._build_invalidator_dir,
                                 invalidate_dependents,
                                 extra_data,
                                 only_externaldeps=only_buildfiles)

    invalidation_check = cache_manager.check(targets, partition_size_hint)

    if invalidation_check.invalid_vts and self.artifact_cache_reads_enabled():
      with self.context.new_workunit('cache'):
        cached_vts, uncached_vts = \
          self.check_artifact_cache(self.check_artifact_cache_for(invalidation_check))
      if cached_vts:
        cached_targets = [vt.target for vt in cached_vts]
        for t in cached_targets:
          self.context.run_tracker.artifact_cache_stats.add_hit('default', t)
        if not silent:
          self._report_targets('Using cached artifacts for ', cached_targets, '.')
      if uncached_vts:
        uncached_targets = [vt.target for vt in uncached_vts]
        for t in uncached_targets:
          self.context.run_tracker.artifact_cache_stats.add_miss('default', t)
        if not silent:
          self._report_targets('No cached artifacts for ', uncached_targets, '.')
      # Now that we've checked the cache, re-partition whatever is still invalid.
      invalidation_check = \
        InvalidationCheck(invalidation_check.all_vts, uncached_vts, partition_size_hint)

    if not silent:
      targets = []
      sources = []
      num_invalid_partitions = len(invalidation_check.invalid_vts_partitioned)
      for vt in invalidation_check.invalid_vts_partitioned:
        targets.extend(vt.targets)
        sources.extend(vt.cache_key.sources)
      if len(targets):
        msg_elements = ['Invalidated ',
                        items_to_report_element([t.address.reference() for t in targets], 'target')]
        if len(sources) > 0:
          msg_elements.append(' containing ')
          msg_elements.append(items_to_report_element(sources, 'source file'))
        if num_invalid_partitions > 1:
          msg_elements.append(' in %d target partitions' % num_invalid_partitions)
        msg_elements.append('.')
        self.context.log.info(*msg_elements)

    # Yield the result, and then mark the targets as up to date.
    yield invalidation_check
    if not self.dry_run:
      for vt in invalidation_check.invalid_vts:
        vt.update()  # In case the caller doesn't update.
Example #47
    def work_for_vts_rsc_jar_library(vts, ctx):

      cp_entries = []

      # Include the current machine's jdk lib jars. This'll blow up remotely.
      # We need a solution for that.
      # Probably something to do with https://github.com/pantsbuild/pants/pull/6346
      # TODO perhaps determine the platform of the jar and use that here.
      # https://github.com/pantsbuild/pants/issues/6547
      distribution = JvmPlatform.preferred_jvm_distribution([], strict=True)
      jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
      cp_entries.extend(jvm_lib_jars_abs)

      # TODO use compile_classpath
      classpath_abs = [
        path for (conf, path) in
        self.context.products.get_data('rsc_classpath').get_for_target(ctx.target)
      ]
      dependency_classpath = self._zinc.compile_classpath(
        'compile_classpath',
        ctx.target,
        extra_cp_entries=self._extra_compile_time_classpath)
      classpath_rel = fast_relpath_collection(classpath_abs)
      cp_entries.extend(classpath_rel)

      counter_val = str(counter()).rjust(counter.format_length(), b' ')
      counter_str = '[{}/{}] '.format(counter_val, counter.size)
      self.context.log.info(
        counter_str,
        'Metacp-ing ',
        items_to_report_element(cp_entries, 'jar'),
        ' in ',
        items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
        ' (',
        ctx.target.address.spec,
        ').')

      ctx.ensure_output_dirs_exist()

      tgt, = vts.targets
      with Timer() as timer:
        # Step 1: Convert classpath to SemanticDB
        # ---------------------------------------
        scalac_classpath_path_entries_abs = self.tool_classpath('workaround-metacp-dependency-classpath')
        scalac_classpath_path_entries = fast_relpath_collection(scalac_classpath_path_entries_abs)
        rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
        args = [
          '--verbose',
          # NB: Without this setting, rsc will be missing some symbols
          #     from the scala library.
          '--include-scala-library-synthetics', # TODO generate these once and cache them
          # NB: We need to add these extra dependencies in order to be able
          #     to find symbols used by the scalac jars.
          '--dependency-classpath', os.pathsep.join(dependency_classpath + scalac_classpath_path_entries),
          # NB: The directory to dump the semanticdb jars generated by metacp.
          '--out', rsc_index_dir,
          os.pathsep.join(cp_entries),
        ]
        metacp_wu = self._runtool(
          'scala.meta.cli.Metacp',
          'metacp',
          args,
          distribution,
          tgt=tgt,
          input_files=(scalac_classpath_path_entries + classpath_rel),
          output_dir=rsc_index_dir)
        metacp_stdout = stdout_contents(metacp_wu)
        metacp_result = json.loads(metacp_stdout)

        metai_classpath = self._collect_metai_classpath(
          metacp_result, classpath_rel, jvm_lib_jars_abs)

        # Step 1.5: metai Index the semanticdbs
        # -------------------------------------
        self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

        abs_output = [(conf, os.path.join(get_buildroot(), x))
                      for conf in self._confs for x in metai_classpath]

        self._metacp_jars_classpath_product.add_for_target(
          ctx.target,
          abs_output,
        )

      self._record_target_stats(tgt,
          len(abs_output),
          len([]),
          timer.elapsed,
          False,
          'metacp'
        )
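
Several of the rsc/metacp examples wrap tool invocations in Timer and read timer.elapsed afterwards when recording per-target stats. A minimal, hypothetical context manager with that interface (the examples import the real class from elsewhere in Pants):

import time

class Timer(object):
  """Hypothetical stand-in: exposes elapsed seconds after the block exits."""

  def __enter__(self):
    self._start = time.time()
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    self.elapsed = time.time() - self._start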