Example #1
  def execute(self):
    # If none of our computed products are necessary, return immediately.
    if not self._create_products_if_should_run():
      return

    zinc_analysis = self.context.products.get_data('zinc_analysis')
    classpath_product = self.context.products.get_data('runtime_classpath')
    classes_by_source = self.context.products.get_data('classes_by_source')
    product_deps_by_src = self.context.products.get_data('product_deps_by_src')

    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)

    targets = zinc_analysis.keys()
    with self.invalidated(targets,
                          fingerprint_strategy=fingerprint_strategy,
                          invalidate_dependents=True) as invalidation_check:
      # Extract and parse products for any relevant targets.
      for vt in invalidation_check.all_vts:
        summary_json_file = self._summary_json_file(vt)
        cp_entry, _, analysis_file = zinc_analysis[vt.target]
        if not vt.valid:
          self._extract_analysis(vt.target, analysis_file, summary_json_file)
        self._register_products(vt.target,
                                cp_entry,
                                summary_json_file,
                                classes_by_source,
                                product_deps_by_src)
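
The shape above — do the expensive extraction only for invalid targets, but register products for every target — recurs throughout these examples. A stripped-down sketch of that pattern (hypothetical cache and callbacks, not the Pants invalidation API):

# Hypothetical skeleton of the invalidate-then-register pattern: expensive work
# runs only for entries whose fingerprint changed, while product registration
# runs for all entries so downstream consumers always see a complete map.
def process(entries, cache, extract, register):
    for key, fingerprint in entries:
        if cache.get(key) != fingerprint:  # the "not vt.valid" case above
            extract(key)                   # expensive: parse analysis, etc.
            cache[key] = fingerprint       # mark valid, like vt.update()
        register(key)                      # always register products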
Example #2
  def _default_work_for_vts(self, vts, ctx, input_classpath_product_key, counter, all_compile_contexts, output_classpath_product):
    progress_message = ctx.target.address.spec

    # Double check the cache before beginning compilation
    hit_cache = self.check_cache(vts, counter)

    if not hit_cache:
      # Compute the compile classpath for this target.
      dependency_cp_entries = self._zinc.compile_classpath_entries(
        input_classpath_product_key,
        ctx.target,
        extra_cp_entries=self._extra_compile_time_classpath,
      )

      upstream_analysis = dict(self._upstream_analysis(all_compile_contexts, dependency_cp_entries))

      is_incremental = self.should_compile_incrementally(vts, ctx)
      if not is_incremental:
        # Purge existing analysis file in non-incremental mode.
        safe_delete(ctx.analysis_file)
        # Work around https://github.com/pantsbuild/pants/issues/3670
        safe_rmtree(ctx.classes_dir.path)

      dep_context = DependencyContext.global_instance()
      tgt, = vts.targets
      compiler_option_sets = dep_context.defaulted_property(tgt, 'compiler_option_sets')
      zinc_file_manager = dep_context.defaulted_property(tgt, 'zinc_file_manager')
      with Timer() as timer:
        directory_digest = self._compile_vts(vts,
                                             ctx,
                                             upstream_analysis,
                                             dependency_cp_entries,
                                             progress_message,
                                             tgt.platform,
                                             compiler_option_sets,
                                             zinc_file_manager,
                                             counter)

      ctx.classes_dir = ClasspathEntry(ctx.classes_dir.path, directory_digest)

      self._record_target_stats(tgt,
                                len(dependency_cp_entries),
                                len(ctx.sources),
                                timer.elapsed,
                                is_incremental,
                                'compile')

      # Write any additional resources for this target to the target workdir.
      self.write_extra_resources(ctx)

      # Jar the compiled output.
      self._create_context_jar(ctx)

    # Update the products with the latest classes.
    output_classpath_product.add_for_target(
      ctx.target,
      [(conf, self._classpath_for_context(ctx)) for conf in self._confs],
    )
    self.register_extra_products_from_contexts([ctx.target], all_compile_contexts)
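
Before a non-incremental compile, the stale analysis file and classes directory are purged first (the `safe_delete`/`safe_rmtree` calls above). A minimal stdlib equivalent, assuming the same "ignore missing paths" semantics:

import os
import shutil

def purge_for_full_recompile(analysis_file, classes_dir):
    # Mirrors the spirit of safe_delete + safe_rmtree: tolerate missing paths
    # so a first-time (or already-clean) compile does not fail.
    try:
        os.unlink(analysis_file)
    except FileNotFoundError:
        pass
    shutil.rmtree(classes_dir, ignore_errors=True)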
Example #3
  def _get_plugin_map(self, compiler, options_src, target):
    """Returns a map of plugin to args, for the given compiler.

    Only plugins that must actually be activated will be present as keys in the map.
    Plugins with no arguments will have an empty list as a value.

    Active plugins and their args will be gathered from (in order of precedence):
    - The <compiler>_plugins and <compiler>_plugin_args fields of the target, if it has them.
    - The <compiler>_plugins and <compiler>_plugin_args options of this task, if it has them.
    - The <compiler>_plugins and <compiler>_plugin_args fields of this task, if it has them.

    Note that in-repo plugins will not be returned, even if requested, when building
    themselves.  Use published versions of those plugins for that.

    See:
    - examples/src/java/org/pantsbuild/example/javac/plugin/README.md.
    - examples/src/scala/org/pantsbuild/example/scalac/plugin/README.md

    :param compiler: one of 'javac', 'scalac'.
    :param options_src: A JvmToolMixin instance providing plugin options.
    :param target: The target whose plugins we compute.
    """
    # Note that we get() options and getattr() target fields and task methods,
    # so we're robust when those don't exist (or are None).
    plugins_key = '{}_plugins'.format(compiler)

    dep_context = DependencyContext.global_instance()
    compiler_option_sets = dep_context.defaulted_property(target, 'compiler_option_sets')

    requested_plugins = (
      tuple(getattr(self, plugins_key, []) or []) +
      tuple(options_src.get_options().get(plugins_key, []) or []) +
      tuple((getattr(target, plugins_key, []) or [])) +
      tuple(
        plugin_name
        for option_set_name in compiler_option_sets
        for plugin_name in self.get_options().compiler_option_sets_enabled_scalac_plugins.get(option_set_name, [])
      )
    )
    # Allow multiple flags and also comma-separated values in a single flag.
    requested_plugins = {p for val in requested_plugins for p in val.split(',')}

    plugin_args_key = '{}_plugin_args'.format(compiler)
    available_plugin_args = {}
    available_plugin_args.update(getattr(self, plugin_args_key, {}) or {})
    available_plugin_args.update(options_src.get_options().get(plugin_args_key, {}) or {})
    available_plugin_args.update(getattr(target, plugin_args_key, {}) or {})

    # From all available args, pluck just the ones for the selected plugins.
    plugin_map = {}
    for plugin in requested_plugins:
      # Don't attempt to use a plugin while building that plugin.
      # This avoids a bootstrapping problem.  Note that you can still
      # use published plugins on themselves, just not in-repo plugins.
      if target not in self._plugin_targets(compiler).get(plugin, {}):
        plugin_map[plugin] = available_plugin_args.get(plugin, [])
    return plugin_map
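
The precedence rules in the docstring reduce to an ordered dict merge: later `update()` calls win. A standalone sketch with hypothetical plugin data (plain dicts, no Pants types):

# Task fields are applied first (lowest precedence), then task options, then
# target fields, mirroring the three update() calls in _get_plugin_map.
task_args = {'checker': ['-strict']}
option_args = {'checker': ['-lenient']}
target_args = {}

available = {}
available.update(task_args)
available.update(option_args)   # overrides the task field
available.update(target_args)   # target fields win overall

requested = {'checker', 'formatter'}
plugin_map = {p: available.get(p, []) for p in requested}
# plugin_map == {'checker': ['-lenient'], 'formatter': []}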
Example #4
def _dependencies_needed_in_classpath(target):
    if isinstance(target, JvmTarget):
        return [
            dep
            for dep in DependencyContext.global_instance().dependencies_respecting_strict_deps(
                target
            )
        ]
    else:
        return [dep for dep in target.closure()]
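
A hedged illustration of the branch above, with toy stand-ins for the build graph (treating strict deps as just the declared deps, which oversimplifies Pants' actual strict-deps semantics):

class Node:
    # Toy target: `jvm` selects the strict policy, like isinstance(JvmTarget).
    def __init__(self, name, deps=(), jvm=False):
        self.name, self.deps, self.jvm = name, list(deps), jvm

    def closure(self):
        seen, stack = [], [self]
        while stack:
            node = stack.pop()
            if node not in seen:
                seen.append(node)
                stack.extend(node.deps)
        return seen

def dependencies_needed_in_classpath(target):
    if target.jvm:
        return list(target.deps)      # strict: declared deps only
    return list(target.closure())     # otherwise: full transitive closure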
Example #5
  def execute(self):
    # In case we have no relevant targets and return early, create the requested product maps.
    self.create_empty_extra_products()

    relevant_targets = list(self.context.targets(predicate=self.select))

    if not relevant_targets:
      return

    # Clone the compile_classpath to the runtime_classpath.
    classpath_product = self.create_runtime_classpath()

    def classpath_for_context(context):
      if self.get_options().use_classpath_jars:
        return context.jar_file
      return context.classes_dir

    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)
    # Note, JVM targets are validated (`vts.update()`) as they succeed.  As a result,
    # we begin writing artifacts out to the cache immediately instead of waiting for
    # all targets to finish.
    with self.invalidated(relevant_targets,
                          invalidate_dependents=True,
                          fingerprint_strategy=fingerprint_strategy,
                          topological_order=True) as invalidation_check:


      # Register runtime classpath products for all targets.
      compile_contexts = {vt.target: self.create_compile_context(vt.target, vt.results_dir)
                          for vt in invalidation_check.all_vts}
      for ccs in compile_contexts.values():
        cc = self.select_runtime_context(ccs)
        classpath_product.add_for_target(cc.target, [(conf, classpath_for_context(cc))
                                                     for conf in self._confs])

      # Register products for valid targets.
      valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid]
      self.register_extra_products_from_contexts(valid_targets, compile_contexts)

      # Build any invalid targets (which will register products in the background).
      if invalidation_check.invalid_vts:
        self.do_compile(
          invalidation_check,
          compile_contexts,
        )

      if not self.get_options().use_classpath_jars:
        # Once compilation has completed, replace the classpath entry for each target with
        # its jar'd representation.
        for ccs in compile_contexts.values():
          cc = self.select_runtime_context(ccs)
          for conf in self._confs:
            classpath_product.remove_for_target(cc.target, [(conf, cc.classes_dir)])
            classpath_product.add_for_target(cc.target, [(conf, cc.jar_file)])
Example #6
    def _flat_non_modulizable_deps_for_modulizable_targets(
        self, modulizable_targets: FrozenOrderedSet[Target]
    ) -> Dict[Target, FrozenOrderedSet[Target]]:
        """Collect flat dependencies for targets that will end up in libraries. When visiting a
        target, we don't expand the dependencies that are modulizable targets, since we need to
        reflect those relationships in a separate way later on.

        E.g. if A -> B -> C -> D and A -> E and B -> F, if modulizable_targets = {A, B}, the resulting map will be:
         {
            A -> {E},
            B -> {C, F, D},

            // Some other entries for intermediate dependencies
            C -> {D},
            E -> {},
            F -> {},
         }
        Therefore, when computing the library entries for A, we need to walk the (transitive) modulizable dependency graph,
        and accumulate the entries in the map.

        This function takes strict_deps into account when generating the graph.
        """
        flat_deps: Dict[Target, FrozenOrderedSet[Target]] = {}

        def create_entry_for_target(target: Target) -> None:
            target_key = target
            if self._is_strict_deps(target):
                dependencies = target.strict_dependencies(DependencyContext.global_instance())
            else:
                dependencies = target.dependencies
            non_modulizable_deps = [dep for dep in dependencies if dep not in modulizable_targets]
            entry: OrderedSet[Target] = OrderedSet()
            for dep in non_modulizable_deps:
                entry.update(flat_deps.get(dep, set()).union({dep}))
            flat_deps[target_key] = FrozenOrderedSet(entry)

        targets_with_strict_deps = [t for t in modulizable_targets if self._is_strict_deps(t)]
        for t in targets_with_strict_deps:
            flat_deps[t] = FrozenOrderedSet(
                t.strict_dependencies(DependencyContext.global_instance())
            )

        self.context.build_graph.walk_transitive_dependency_graph(
            addresses=[t.address for t in modulizable_targets if not self._is_strict_deps(t)],
            # Work is to populate the entry of the map by merging the entries of all of the deps.
            work=create_entry_for_target,
            # We pre-populate the dict according to several principles (e.g. strict_deps),
            # so a target being there means that there is no need to expand.
            predicate=lambda target: target not in flat_deps.keys(),
            # We want children to populate their entries in the map before the parents,
            # so that we are guaranteed to have entries for all dependencies before
            # computing a target's entry.
            postorder=True,
        )
        return flat_deps
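
The docstring's worked example can be reproduced with a minimal postorder walk; a standalone sketch with plain dicts standing in for the build graph (no Pants types, no strict-deps handling):

# Toy reproduction of the docstring's example: A -> B -> C -> D, A -> E,
# B -> F, with modulizable_targets = {A, B}. Postorder guarantees each dep's
# entry is filled before its dependees read it.
graph = {'A': ['B', 'E'], 'B': ['C', 'F'], 'C': ['D'], 'D': [], 'E': [], 'F': []}
modulizable = {'A', 'B'}
flat_deps = {}

def visit(target):
    for dep in graph[target]:
        if dep not in flat_deps:
            visit(dep)                      # children first (postorder)
    entry = []
    for dep in graph[target]:
        if dep in modulizable:
            continue                        # don't expand modulizable deps
        for d in [dep, *flat_deps[dep]]:
            if d not in entry:
                entry.append(d)
    flat_deps[target] = entry

for t in sorted(modulizable):
    visit(t)
# flat_deps['A'] == ['E'] and sorted(flat_deps['B']) == ['C', 'D', 'F']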
Example #7
def create_entry_for_target(target: Target) -> None:
    target_key = target
    if self._is_strict_deps(target):
        dependencies = target.strict_dependencies(DependencyContext.global_instance())
    else:
        dependencies = target.dependencies
    non_modulizable_deps = [dep for dep in dependencies if dep not in modulizable_targets]
    entry: OrderedSet[Target] = OrderedSet()
    for dep in non_modulizable_deps:
        entry.update(flat_deps.get(dep, set()).union({dep}))
    flat_deps[target_key] = FrozenOrderedSet(entry)
Example #8
        def work_for_vts(vts, ctx):
            progress_message = ctx.target.address.spec

            # Double check the cache before beginning compilation
            hit_cache = self.check_cache(vts, counter)

            if not hit_cache:
                # Compute the compile classpath for this target.
                dependency_cp_entries = self._zinc.compile_classpath_entries(
                    'runtime_classpath',
                    ctx.target,
                    extra_cp_entries=self._extra_compile_time_classpath,
                )

                upstream_analysis = dict(
                    self._upstream_analysis(all_compile_contexts,
                                            dependency_cp_entries))

                is_incremental = self.should_compile_incrementally(vts, ctx)
                if not is_incremental:
                    # Purge existing analysis file in non-incremental mode.
                    safe_delete(ctx.analysis_file)
                    # Work around https://github.com/pantsbuild/pants/issues/3670
                    safe_rmtree(ctx.classes_dir)

                dep_context = DependencyContext.global_instance()
                tgt, = vts.targets
                compiler_option_sets = dep_context.defaulted_property(
                    tgt, lambda x: x.compiler_option_sets)
                zinc_file_manager = dep_context.defaulted_property(
                    tgt, lambda x: x.zinc_file_manager)
                with Timer() as timer:
                    self._compile_vts(vts, ctx, upstream_analysis,
                                      dependency_cp_entries, progress_message,
                                      tgt.platform, compiler_option_sets,
                                      zinc_file_manager, counter)
                self._record_target_stats(tgt, len(dependency_cp_entries),
                                          len(ctx.sources), timer.elapsed,
                                          is_incremental, 'compile')

                # Write any additional resources for this target to the target workdir.
                self.write_extra_resources(ctx)

                # Jar the compiled output.
                self._create_context_jar(ctx)

            # Update the products with the latest classes.
            classpath_product.add_for_target(
                ctx.target,
                [(conf, self._classpath_for_context(ctx))
                 for conf in self._confs],
            )
            self.register_extra_products_from_contexts([ctx.target],
                                                       all_compile_contexts)
Example #9
    def _default_work_for_vts(self, vts, ctx, input_classpath_product_key,
                              counter, all_compile_contexts,
                              output_classpath_product):
        progress_message = ctx.target.address.spec

        # Double check the cache before beginning compilation
        hit_cache = self.check_cache(vts, counter)

        if not hit_cache:
            # Compute the compile classpath for this target.
            dependency_cp_entries = self._zinc.compile_classpath_entries(
                input_classpath_product_key,
                ctx.target,
                extra_cp_entries=self._extra_compile_time_classpath,
            )

            upstream_analysis = dict(
                self._upstream_analysis(all_compile_contexts,
                                        dependency_cp_entries))

            is_incremental = self.should_compile_incrementally(vts, ctx)
            if not is_incremental:
                # Purge existing analysis file in non-incremental mode.
                safe_delete(ctx.analysis_file)
                # Work around https://github.com/pantsbuild/pants/issues/3670
                safe_rmtree(ctx.classes_dir.path)

            dep_context = DependencyContext.global_instance()
            tgt, = vts.targets
            compiler_option_sets = dep_context.defaulted_property(
                tgt, 'compiler_option_sets')
            zinc_file_manager = dep_context.defaulted_property(
                tgt, 'zinc_file_manager')
            with Timer() as timer:
                directory_digest = self._compile_vts(
                    vts, ctx, upstream_analysis, dependency_cp_entries,
                    progress_message, tgt.platform, compiler_option_sets,
                    zinc_file_manager, counter)

            # Store the produced Digest (if any).
            self._set_directory_digest_for_compile_context(
                ctx, directory_digest)

            self._record_target_stats(tgt, len(dependency_cp_entries),
                                      len(ctx.sources), timer.elapsed,
                                      is_incremental, 'compile')

        # Update the products with the latest classes.
        output_classpath_product.add_for_target(
            ctx.target,
            [(conf, self._classpath_for_context(ctx)) for conf in self._confs],
        )
        self.register_extra_products_from_contexts([ctx.target],
                                                   all_compile_contexts)
Example #10
  def compile_classpath(self, classpath_product_key, target, extra_cp_entries=None):
    """Compute the compile classpath for the given target."""
    classpath_product = self._products.get_data(classpath_product_key)

    if DependencyContext.global_instance().defaulted_property(target, lambda x: x.strict_deps):
      dependencies = target.strict_dependencies(DependencyContext.global_instance())
    else:
      dependencies = DependencyContext.global_instance().all_dependencies(target)

    all_extra_cp_entries = list(self._compiler_plugins_cp_entries())
    if extra_cp_entries:
      all_extra_cp_entries.extend(extra_cp_entries)

    # TODO: We convert dependencies to an iterator here in order to _preserve_ a bug that will be
    # fixed in https://github.com/pantsbuild/pants/issues/4874: `ClasspathUtil.compute_classpath`
    # expects to receive a list, but had been receiving an iterator. In the context of an
    # iterator, `excludes` are not applied
    # in ClasspathProducts.get_product_target_mappings_for_targets.
    return ClasspathUtil.compute_classpath(iter(dependencies),
                                           classpath_product,
                                           all_extra_cp_entries,
                                           self.DEFAULT_CONFS)
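
`defaulted_property` appears here with a lambda accessor and elsewhere in these examples with a string field name; both resolve a per-target setting with a global fallback. A hedged sketch of that resolution, assuming an unset target field means "use the option default":

# Hypothetical re-implementation of the defaulted_property fallback: prefer the
# target's own setting when explicitly set, otherwise use the global option.
def defaulted_property(target, field_name, global_options):
    value = getattr(target, field_name, None)
    if value is not None:
        return value
    return getattr(global_options, field_name)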
Example #11
  def execute(self):
    requested_compiler = JvmPlatform.global_instance().get_options().compiler

    if requested_compiler == self.Compiler.ZINC and self.compiler_name == self.Compiler.RSC:
      # Issue a deprecation warning (above) and rewrite zinc to rsc, as zinc is being deprecated.
      JvmPlatform.global_instance().get_options().compiler = RankedValue(0, self.compiler_name)
    elif requested_compiler != self.compiler_name:
      # If the requested compiler is not the one supported by this task, log and abort.
      self.context.log.debug('Requested an unsupported compiler [{}], aborting'.format(requested_compiler))
      return

    # In case we have no relevant targets and return early, create the requested product maps.
    self.create_empty_extra_products()

    relevant_targets = list(self.context.targets(predicate=self.select))

    if not relevant_targets:
      return

    # Clone the compile_classpath to the runtime_classpath.
    classpath_product = self.create_runtime_classpath()

    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)
    # Note, JVM targets are validated (`vts.update()`) as they succeed.  As a result,
    # we begin writing artifacts out to the cache immediately instead of waiting for
    # all targets to finish.
    with self.invalidated(relevant_targets,
                          invalidate_dependents=True,
                          fingerprint_strategy=fingerprint_strategy,
                          topological_order=True) as invalidation_check:

      compile_contexts = {vt.target: self.create_compile_context(vt.target, vt.results_dir)
                          for vt in invalidation_check.all_vts}

      self.do_compile(
        invalidation_check,
        compile_contexts,
        classpath_product,
      )

      if not self.get_options().use_classpath_jars:
        # Once compilation has completed, replace the classpath entry for each target with
        # its jar'd representation.
        for ccs in compile_contexts.values():
          cc = self.select_runtime_context(ccs)
          for conf in self._confs:
            classpath_product.remove_for_target(cc.target, [(conf, cc.classes_dir)])
            classpath_product.add_for_target(cc.target, [(conf, cc.jar_file)])
Example #12
        def work_for_vts(vts, ctx):
            progress_message = ctx.target.address.spec

            # Double check the cache before beginning compilation
            hit_cache = check_cache(vts)

            if not hit_cache:
                # Compute the compile classpath for this target.
                cp_entries = [ctx.classes_dir]
                cp_entries.extend(
                    self._zinc.compile_classpath(
                        classpath_product_key,
                        ctx.target,
                        extra_cp_entries=self._extra_compile_time_classpath,
                        zinc_compile_instance=self))
                upstream_analysis = dict(
                    self._upstream_analysis(compile_contexts, cp_entries))

                is_incremental = should_compile_incrementally(vts, ctx)
                if not is_incremental:
                    # Purge existing analysis file in non-incremental mode.
                    safe_delete(ctx.analysis_file)
                    # Work around https://github.com/pantsbuild/pants/issues/3670
                    safe_rmtree(ctx.classes_dir)

                dep_context = DependencyContext.global_instance()
                tgt, = vts.targets
                fatal_warnings = dep_context.defaulted_property(
                    tgt, lambda x: x.fatal_warnings)
                zinc_file_manager = dep_context.defaulted_property(
                    tgt, lambda x: x.zinc_file_manager)
                with Timer() as timer:
                    self._compile_vts(vts, ctx.target, ctx.sources,
                                      ctx.analysis_file, upstream_analysis,
                                      cp_entries, ctx.classes_dir, ctx.log_dir,
                                      ctx.zinc_args_file, progress_message,
                                      tgt.platform, fatal_warnings,
                                      zinc_file_manager, counter)
                self._record_target_stats(tgt, len(cp_entries),
                                          len(ctx.sources), timer.elapsed,
                                          is_incremental)

                # Write any additional resources for this target to the target workdir.
                self.write_extra_resources(ctx)

                # Jar the compiled output.
                self._create_context_jar(ctx)

            # Update the products with the latest classes.
            self._register_vts([ctx])
Example #13
    def execute(self):
        deprecated_conditional(
            lambda: self.context.products.is_required_data('classes_by_source'),
            '1.20.0.dev2',
            'The `classes_by_source` product depends on internal compiler details and is no '
            'longer produced.')
        deprecated_conditional(
            lambda: self.context.products.is_required_data('product_deps_by_src'),
            '1.20.0.dev2',
            'The `product_deps_by_src` product depends on internal compiler details and is no '
            'longer produced. For similar functionality consume `product_deps_by_target`.')

        # If none of our computed products are necessary, return immediately.
        if not self._create_products_if_should_run():
            return

        classpath_product = self.context.products.get_data('runtime_classpath')
        product_deps_by_target = self.context.products.get_data(
            'product_deps_by_target')

        fingerprint_strategy = DependencyContext.global_instance(
        ).create_fingerprint_strategy(classpath_product)

        # classpath fingerprint strategy only works on targets with a classpath.
        targets = [
            target for target in self.context.targets()
            if hasattr(target, 'strict_deps')
        ]
        with self.invalidated(
                targets,
                fingerprint_strategy=fingerprint_strategy,
                invalidate_dependents=True) as invalidation_check:
            for vt in invalidation_check.all_vts:
                # A list of class paths to the artifacts created by the target we are computing deps for.
                target_artifact_classpaths = [
                    path
                    for _, path in classpath_product.get_for_target(vt.target)
                ]
                potential_deps_classpaths = self._zinc.compile_classpath(
                    'runtime_classpath', vt.target)

                jdeps_output_json = self._jdeps_output_json(vt)
                if not vt.valid:
                    self._run_jdeps_analysis(vt.target,
                                             target_artifact_classpaths,
                                             potential_deps_classpaths,
                                             jdeps_output_json)
                self._register_products(vt.target, jdeps_output_json,
                                        product_deps_by_target)
Example #14
    def create_extra_products_for_targets(self, targets):
        if not targets:
            return
        if self.context.products.is_required_data("zinc_args"):
            zinc_args = self.context.products.get_data("zinc_args")
            with self.invalidated(
                    targets, invalidate_dependents=False,
                    topological_order=True) as invalidation_check:

                compile_contexts = {
                    vt.target:
                    self.create_compile_context(vt.target, vt.results_dir)
                    for vt in invalidation_check.all_vts
                }
                runtime_compile_contexts = {
                    target: self.select_runtime_context(cc)
                    for target, cc in compile_contexts.items()
                }
                for vt in invalidation_check.all_vts:
                    dependency_classpath = self._zinc.compile_classpath_entries(
                        "runtime_classpath",
                        vt.target,
                        extra_cp_entries=self._extra_compile_time_classpath,
                    )
                    dep_context = DependencyContext.global_instance()
                    compiler_option_sets = dep_context.defaulted_property(
                        vt.target, "compiler_option_sets")
                    zinc_file_manager = dep_context.defaulted_property(
                        vt.target, "zinc_file_manager")
                    ctx = runtime_compile_contexts[vt.target]
                    absolute_classpath = (ctx.classes_dir.path, ) + tuple(
                        ce.path for ce in dependency_classpath)
                    upstream_analysis = dict(
                        self._upstream_analysis(compile_contexts,
                                                dependency_classpath))
                    zinc_args[vt.target] = self.create_zinc_args(
                        ctx,
                        self._args,
                        upstream_analysis,
                        absolute_classpath,
                        vt.target.platform,
                        compiler_option_sets,
                        zinc_file_manager,
                        self._get_plugin_map("javac", Java.global_instance(),
                                             ctx.target),
                        self._get_plugin_map("scalac",
                                             ScalaPlatform.global_instance(),
                                             ctx.target),
                    )
Example #15
    def work_for_vts(vts, ctx):
      progress_message = ctx.target.address.spec

      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)

      if not hit_cache:
        # Compute the compile classpath for this target.
        cp_entries = self._cp_entries_for_ctx(ctx, 'runtime_classpath')

        upstream_analysis = dict(self._upstream_analysis(all_compile_contexts, cp_entries))

        is_incremental = self.should_compile_incrementally(vts, ctx)
        if not is_incremental:
          # Purge existing analysis file in non-incremental mode.
          safe_delete(ctx.analysis_file)
          # Work around https://github.com/pantsbuild/pants/issues/3670
          safe_rmtree(ctx.classes_dir)

        dep_context = DependencyContext.global_instance()
        tgt, = vts.targets
        fatal_warnings = dep_context.defaulted_property(tgt, lambda x: x.fatal_warnings)
        zinc_file_manager = dep_context.defaulted_property(tgt, lambda x: x.zinc_file_manager)
        with Timer() as timer:
          self._compile_vts(vts,
                            ctx,
                            upstream_analysis,
                            cp_entries,
                            progress_message,
                            tgt.platform,
                            fatal_warnings,
                            zinc_file_manager,
                            counter)
        self._record_target_stats(tgt,
                                  len(cp_entries),
                                  len(ctx.sources),
                                  timer.elapsed,
                                  is_incremental,
                                  'compile')

        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

        # Jar the compiled output.
        self._create_context_jar(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], all_compile_contexts)
Example #16
  def execute(self):
    if self._skip(self.get_options()):
      return

    classpath_product = self.context.products.get_data('runtime_classpath')
    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)

    targets = self.context.products.get_data('zinc_analysis').keys()

    with self.invalidated(targets,
                          fingerprint_strategy=fingerprint_strategy,
                          invalidate_dependents=True) as invalidation_check:
      for vt in invalidation_check.invalid_vts:
        product_deps_by_src = self.context.products.get_data('product_deps_by_src').get(vt.target)
        if product_deps_by_src is not None:
          self.check(vt.target, product_deps_by_src)
Example #17
  def _zinc_outline(self, ctx, relative_classpath, target_sources, youtline_args):
    zinc_youtline_args = [f'-S{arg}' for arg in youtline_args]
    zinc_file_manager = DependencyContext.global_instance().defaulted_property(ctx.target, 'zinc_file_manager')

    def relative_to_exec_root(path):
      return fast_relpath(path, get_buildroot())

    analysis_cache = relative_to_exec_root(ctx.analysis_file)
    classes_dir = relative_to_exec_root(ctx.classes_dir.path)

    scalac_classpath_entries = self.scalac_classpath_entries()
    scala_path = [relative_to_exec_root(classpath_entry.path) for classpath_entry in scalac_classpath_entries]

    zinc_args = []
    zinc_args.extend([
      '-log-level', self.get_options().level,
      '-analysis-cache', analysis_cache,
      '-classpath', os.pathsep.join(relative_classpath),
    ])

    compiler_bridge_classpath_entry = self._zinc.compile_compiler_bridge(self.context)
    zinc_args.extend(['-compiled-bridge-jar', relative_to_exec_root(compiler_bridge_classpath_entry.path)])
    zinc_args.extend(['-scala-path', ':'.join(scala_path)])

    zinc_args.extend(zinc_youtline_args)

    if not zinc_file_manager:
      zinc_args.append('-no-zinc-file-manager')

    jvm_options = []

    if self.javac_classpath():
      jvm_options.extend([f"-Xbootclasspath/p:{':'.join(self.javac_classpath())}"])

    jvm_options.extend(self._jvm_options)

    zinc_args.extend(ctx.sources)

    self.log_zinc_file(ctx.analysis_file)
    self.write_argsfile(ctx, zinc_args)

    return self.execution_strategy.match({
      self.ExecutionStrategy.hermetic: lambda: None,
      self.ExecutionStrategy.subprocess: lambda: self._compile_nonhermetic(jvm_options, ctx, classes_dir),
      self.ExecutionStrategy.nailgun: lambda: self._compile_nonhermetic(jvm_options, ctx, classes_dir),
    })()
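
The closing `execution_strategy.match({...})()` is a dict-based dispatch: each strategy maps to a thunk, and exactly one thunk runs. A minimal sketch with a plain Enum standing in for Pants' execution strategies:

from enum import Enum

class Strategy(Enum):
    # Stand-in for the hermetic/subprocess/nailgun strategies above.
    hermetic = 'hermetic'
    subprocess = 'subprocess'
    nailgun = 'nailgun'

def match(strategy, cases):
    # Return the thunk for the active strategy; the caller invokes it, so
    # only the selected branch ever executes.
    return cases[strategy]

result = match(Strategy.subprocess, {
    Strategy.hermetic: lambda: None,
    Strategy.subprocess: lambda: 'compile in a subprocess',
    Strategy.nailgun: lambda: 'compile via nailgun',
})()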
Example #18
  def execute(self):
    if self._skip(self.get_options()):
      return

    classpath_product = self.context.products.get_data('runtime_classpath')
    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)

    targets = list(self.context.products.get_data('zinc_analysis').keys())

    with self.invalidated(targets,
                          fingerprint_strategy=fingerprint_strategy,
                          invalidate_dependents=True) as invalidation_check:
      for vt in invalidation_check.invalid_vts:
        product_deps_by_src = self.context.products.get_data('product_deps_by_src').get(vt.target)
        if product_deps_by_src is not None:
          self.check(vt.target, product_deps_by_src)
Example #19
  def execute(self):
    if JvmPlatform.global_instance().get_options().compiler != self.compiler_name:
      # If the requested compiler is not the one supported by this task,
      # bail early.
      return

    # In case we have no relevant targets and return early, create the requested product maps.
    self.create_empty_extra_products()

    relevant_targets = list(self.context.targets(predicate=self.select))

    if not relevant_targets:
      return

    # Clone the compile_classpath to the runtime_classpath.
    classpath_product = self.create_runtime_classpath()

    fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
        classpath_product)
    # Note, JVM targets are validated (`vts.update()`) as they succeed.  As a result,
    # we begin writing artifacts out to the cache immediately instead of waiting for
    # all targets to finish.
    with self.invalidated(relevant_targets,
                          invalidate_dependents=True,
                          fingerprint_strategy=fingerprint_strategy,
                          topological_order=True) as invalidation_check:

      compile_contexts = {vt.target: self.create_compile_context(vt.target, vt.results_dir)
                          for vt in invalidation_check.all_vts}

      self.do_compile(
        invalidation_check,
        compile_contexts,
        classpath_product,
      )

      if not self.get_options().use_classpath_jars:
        # Once compilation has completed, replace the classpath entry for each target with
        # its jar'd representation.
        for ccs in compile_contexts.values():
          cc = self.select_runtime_context(ccs)
          for conf in self._confs:
            classpath_product.remove_for_target(cc.target, [(conf, cc.classes_dir.path)])
            classpath_product.add_for_target(cc.target, [(conf, cc.jar_file.path)])
Example #20
    def execute(self):
        if self._skip(self.get_options()):
            return

        classpath_product = self.context.products.get_data("runtime_classpath")
        fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
            classpath_product
        )

        targets = [target for target in self.context.targets() if hasattr(target, "strict_deps")]

        with self.invalidated(
            targets, fingerprint_strategy=fingerprint_strategy, invalidate_dependents=True
        ) as invalidation_check:
            for vt in invalidation_check.invalid_vts:
                product_deps_for_target = self.context.products.get_data(
                    "product_deps_by_target"
                ).get(vt.target)
                if product_deps_for_target is not None:
                    self.check(vt.target, product_deps_for_target)
Example #21
    def execute(self):
        if not self._create_products_if_should_run():
            return

        classpath_product = self.context.products.get_data("runtime_classpath")
        product_deps_by_target = self.context.products.get_data(
            "product_deps_by_target")

        fingerprint_strategy = DependencyContext.global_instance(
        ).create_fingerprint_strategy(classpath_product)

        # classpath fingerprint strategy only works on targets with a classpath.
        targets = [
            target for target in self.context.targets()
            if hasattr(target, "strict_deps")
        ]
        with self.invalidated(
                targets,
                fingerprint_strategy=fingerprint_strategy,
                invalidate_dependents=True) as invalidation_check:
            for vt in invalidation_check.all_vts:
                # A list of class paths to the artifacts created by the target we are computing deps for.
                target_artifact_classpaths = [
                    path
                    for _, path in classpath_product.get_for_target(vt.target)
                ]
                potential_deps_classpaths = self._zinc.compile_classpath(
                    "runtime_classpath", vt.target)

                jdeps_output_json = self._jdeps_output_json(vt)
                if not vt.valid:
                    self._run_jdeps_analysis(
                        vt.target,
                        target_artifact_classpaths,
                        potential_deps_classpaths,
                        jdeps_output_json,
                    )
                self._register_products(vt.target, jdeps_output_json,
                                        product_deps_by_target)
Example #22
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            tgt, = vts.targets

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                self.context.log.info(
                    counter_str, 'Rsc-ing ',
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')
                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance(
                    ).dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_directory_digests = []
                classpath_product = self.context.products.get_data(
                    'rsc_mixed_compile_classpath')
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if self.HERMETIC == self.execution_strategy_enum.value and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of rsc".format(classpath_entry))
                    classpath_directory_digests.append(
                        classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest,
                             sources_snapshot.directory_digest) +
                            tuple(classpath_directory_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return ((EMPTY_DIRECTORY_DIGEST), classpath_abs_jdk)

                    (input_digest, classpath_entry_paths
                     ) = self.execution_strategy_enum.resolve_for_enum_variant(
                         {
                             self.HERMETIC: hermetic_digest_classpath,
                             self.SUBPROCESS: nonhermetic_digest_classpath,
                             self.NAILGUN: nonhermetic_digest_classpath,
                         })()

                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(classpath_entry_paths),
                        '-d',
                        rsc_jar_file_relative_path,
                    ] + self.get_options().extra_rsc_args + target_sources

                    self.write_argsfile(ctx, args)

                    self._runtool(distribution, input_digest, ctx)

                self._record_target_stats(tgt, len(classpath_entry_paths),
                                          len(target_sources), timer.elapsed,
                                          False, 'rsc')

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
Example #23
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            (tgt, ) = vts.targets

            rsc_cc = compile_contexts[target].rsc_cc

            use_youtline = rsc_cc.workflow == self.JvmCompileWorkflowType.outline_and_zinc
            outliner = "scalac-outliner" if use_youtline else "rsc"

            if use_youtline and Semver.parse(
                    self._scala_library_version) < Semver.parse("2.12.9"):
                raise RuntimeError(
                    f"To use scalac's built-in outlining, scala version must be at least 2.12.9, but got {self._scala_library_version}"
                )

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   " ")
                counter_str = f"[{counter_val}/{counter.size}] "
                action_str = "Outlining " if use_youtline else "Rsc-ing "

                self.context.log.info(
                    counter_str,
                    action_str,
                    items_to_report_element(ctx.sources,
                                            f"{self.name()} source"),
                    " in ",
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        "target"),
                    " (",
                    ctx.target.address.spec,
                    ").",
                )
                # This does the following
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance(
                    ).dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_digests = []
                classpath_product = self.context.products.get_data(
                    "rsc_mixed_compile_classpath")
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)

                hermetic = self.execution_strategy == self.ExecutionStrategy.hermetic
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if hermetic and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of {}".format(classpath_entry,
                                                     outliner))
                    classpath_digests.append(classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest, sources_snapshot.digest) +
                            tuple(classpath_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return ((EMPTY_DIGEST), classpath_abs_jdk)

                    (input_digest, classpath_entry_paths) = match(
                        self.execution_strategy,
                        {
                            self.ExecutionStrategy.hermetic:
                            hermetic_digest_classpath,
                            self.ExecutionStrategy.subprocess:
                            nonhermetic_digest_classpath,
                            self.ExecutionStrategy.nailgun:
                            nonhermetic_digest_classpath,
                        },
                    )()

                    youtline_args = []
                    if use_youtline:
                        youtline_args = [
                            "-Youtline",
                            "-Ystop-after:pickler",
                            "-Ypickle-write",
                            rsc_jar_file_relative_path,
                        ]

                    target_sources = ctx.sources

                    # TODO: m.jar digests aren't found, so hermetic will fail.
                    if use_youtline and not hermetic and self.get_options(
                    ).zinc_outline:
                        self._zinc_outline(ctx, classpath_paths,
                                           target_sources, youtline_args)
                    else:
                        args = ([
                            "-cp",
                            os.pathsep.join(classpath_entry_paths),
                            "-d",
                            rsc_jar_file_relative_path,
                        ] + self.get_options().extra_rsc_args + youtline_args +
                                target_sources)

                        self.write_argsfile(ctx, args)

                        self._runtool(distribution, input_digest, ctx,
                                      use_youtline)

                self._record_target_stats(
                    tgt,
                    len(classpath_entry_paths),
                    len(target_sources),
                    timer.elapsed,
                    False,
                    outliner,
                )

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
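
The 2.12.9 version gate above can be expressed without a Semver library; a hedged sketch assuming plain numeric dotted versions:

# Tuple comparison stands in for Semver.parse; this only handles purely
# numeric versions like '2.12.10', which is all this gate needs.
def supports_builtin_outlining(scala_version):
    parts = tuple(int(p) for p in scala_version.split('.')[:3])
    return parts >= (2, 12, 9)

assert supports_builtin_outlining('2.12.10')
assert not supports_builtin_outlining('2.11.12')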
Example #24
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets

      if not hit_cache:
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following
        # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
        #   and rsc compiles of rsc compatible targets.
        # - Run Rsc on the current target with those as dependencies.

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        rsc_deps_classpath_unprocessed = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(dependencies_for_target),
          collection_type=OrderedSet)

        rsc_classpath_rel = fast_relpath_collection(list(rsc_deps_classpath_unprocessed))

        ctx.ensure_output_dirs_exist()

        with Timer() as timer:
          # Outline Scala sources into SemanticDB / scalac compatible header jars.
          # ---------------------------------------------
          rsc_jar_file = fast_relpath(ctx.rsc_jar_file, get_buildroot())

          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)

          distribution = self._get_jvm_distribution()

          def hermetic_digest_classpath():
            jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(distribution)
            merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
              (jdk_libs_digest, sources_snapshot.directory_digest))
            classpath_rel_jdk = rsc_classpath_rel + jdk_libs_rel
            return (merged_sources_and_jdk_digest, classpath_rel_jdk)

          def nonhermetic_digest_classpath():
            classpath_abs_jdk = rsc_classpath_rel + self._jdk_libs_abs(distribution)
            return (EMPTY_DIRECTORY_DIGEST, classpath_abs_jdk)

          (input_digest, classpath_entry_paths) = self.execution_strategy_enum.resolve_for_enum_variant({
            self.HERMETIC: hermetic_digest_classpath,
            self.SUBPROCESS: nonhermetic_digest_classpath,
            self.NAILGUN: nonhermetic_digest_classpath,
          })()

          target_sources = ctx.sources
          args = [
                   '-cp', os.pathsep.join(classpath_entry_paths),
                   '-d', rsc_jar_file,
                 ] + target_sources

          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_classpath_rel),
            input_digest=input_digest,
            output_dir=os.path.dirname(rsc_jar_file))

        self._record_target_stats(tgt,
          len(rsc_classpath_rel),
          len(target_sources),
          timer.elapsed,
          False,
          'rsc'
        )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
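The hermetic/subprocess/nailgun split above is a table dispatch: each execution-strategy variant maps to a zero-argument thunk, resolve_for_enum_variant returns the matching thunk, and the trailing () invokes it. A self-contained sketch of the pattern (the enum values and the returned data here are illustrative stand-ins):

from enum import Enum

class ExecutionStrategy(Enum):
    hermetic = "hermetic"
    subprocess = "subprocess"
    nailgun = "nailgun"

def hermetic_digest_classpath():
    # Hermetic execution needs an input digest plus buildroot-relative paths.
    return ("merged-sources-and-jdk-digest", ["3rdparty/lib.jar"])

def nonhermetic_digest_classpath():
    # Outside the sandbox no digest is needed and absolute paths are fine.
    return (None, ["/abs/3rdparty/lib.jar"])

strategy = ExecutionStrategy.subprocess
input_digest, classpath_entry_paths = {
    ExecutionStrategy.hermetic: hermetic_digest_classpath,
    ExecutionStrategy.subprocess: nonhermetic_digest_classpath,
    ExecutionStrategy.nailgun: nonhermetic_digest_classpath,
}[strategy]()  # select the thunk for the active strategy, then call it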
Example no. 27
        def work_for_vts_rsc(vts, ctx):
            # Double check the cache before beginning compilation
            hit_cache = self.check_cache(vts, counter)
            target = ctx.target
            tgt, = vts.targets

            if not hit_cache:
                counter_val = str(counter()).rjust(counter.format_length(), ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                self.context.log.info(
                    counter_str, 'Rsc-ing ',
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')

                # This does the following:
                # - collect jar dependencies and metacp-classpath entries for them
                # - collect the non-java targets and their classpath entries
                # - break out java targets and their javac'd classpath entries
                # metacp
                # - metacp the java targets
                # rsc
                # - combine the metacp outputs for jars, previous scala targets and the java metacp
                #   classpath
                # - run Rsc on the current target with those as dependencies

                dependencies_for_target = list(
                    DependencyContext.global_instance()
                    .dependencies_respecting_strict_deps(target))

                jar_deps = [
                    t for t in dependencies_for_target
                    if isinstance(t, JarLibrary)
                ]

                def is_java_compile_target(t):
                    return isinstance(t, JavaLibrary) or t.has_sources('.java')

                java_deps = [
                    t for t in dependencies_for_target
                    if is_java_compile_target(t)
                ]
                non_java_deps = [
                    t for t in dependencies_for_target
                    if not (is_java_compile_target(t))
                    and not isinstance(t, JarLibrary)
                ]

                metacped_jar_classpath_abs = _paths_from_classpath(
                    self._metacp_jars_classpath_product.get_for_targets(
                        jar_deps))
                metacped_jar_classpath_abs.extend(
                    self._jvm_lib_metacp_classpath)
                metacped_jar_classpath_rel = fast_relpath_collection(
                    metacped_jar_classpath_abs)

                jar_rsc_classpath_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(jar_deps),
                    collection_type=set)
                jar_rsc_classpath_rel = fast_relpath_collection(
                    jar_rsc_classpath_paths)

                non_java_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(non_java_deps),
                    collection_type=set)
                non_java_rel = fast_relpath_collection(non_java_paths)

                java_paths = _paths_from_classpath(
                    self.context.products.get_data(
                        'rsc_classpath').get_for_targets(java_deps),
                    collection_type=set)
                java_rel = fast_relpath_collection(java_paths)

                ctx.ensure_output_dirs_exist()

                distribution = self._get_jvm_distribution()
                with Timer() as timer:
                    # Step 1: Convert classpath to SemanticDB
                    # ---------------------------------------
                    # If any dependencies have not yet been metacp'd, metacp them so their
                    # indices can be passed to Rsc.
                    # TODO move these to their own jobs. https://github.com/pantsbuild/pants/issues/6754

                    # Inputs
                    # - Java dependencies jars
                    metacp_inputs = java_rel

                    # Dependencies
                    # - 3rdparty jars
                    # - non-java, ie scala, dependencies
                    # - jdk
                    snapshotable_metacp_dependencies = (
                        list(jar_rsc_classpath_rel) + list(non_java_rel) +
                        fast_relpath_collection(
                            _paths_from_classpath(
                                self._extra_compile_time_classpath)))
                    metacp_dependencies = snapshotable_metacp_dependencies + self._jvm_lib_jars_abs

                    if metacp_inputs:
                        rsc_index_dir = fast_relpath(ctx.rsc_index_dir,
                                                     get_buildroot())
                        args = [
                            '--verbose',
                            '--stub-broken-signatures',
                            '--dependency-classpath',
                            os.pathsep.join(metacp_dependencies),
                            # NB: The directory to dump the semanticdb jars generated by metacp.
                            '--out',
                            rsc_index_dir,
                            os.pathsep.join(metacp_inputs),
                        ]
                        metacp_wu = self._runtool(
                            'scala.meta.cli.Metacp',
                            'metacp',
                            args,
                            distribution,
                            tgt=tgt,
                            input_files=tuple(
                                metacp_inputs +
                                snapshotable_metacp_dependencies),
                            output_dir=rsc_index_dir)
                        metacp_stdout = stdout_contents(metacp_wu)
                        metacp_result = json.loads(metacp_stdout)

                        metacped_java_dependency_rel = self._collect_metai_classpath(
                            metacp_result, java_rel)

                        # Step 1.5: metai Index the semanticdbs
                        # -------------------------------------
                        self._run_metai_tool(distribution,
                                             metacped_java_dependency_rel,
                                             rsc_index_dir, tgt)
                    else:
                        # NB: there are no unmetacp'd dependencies
                        metacped_java_dependency_rel = []

                    # Step 2: Outline Scala sources into SemanticDB
                    # ---------------------------------------------
                    rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file,
                                                 get_buildroot())

                    # TODO remove non-rsc entries from non_java_rel in a better way
                    rsc_semanticdb_classpath = metacped_java_dependency_rel + \
                                               metacped_jar_classpath_rel + \
                                               [j for j in non_java_rel if 'compile/rsc/' in j]
                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(rsc_semanticdb_classpath),
                        '-d',
                        rsc_mjar_file,
                    ] + target_sources
                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)
                    self._runtool(
                        'rsc.cli.Main',
                        'rsc',
                        args,
                        distribution,
                        tgt=tgt,
                        input_files=tuple(rsc_semanticdb_classpath),
                        input_digest=sources_snapshot.directory_digest,
                        output_dir=os.path.dirname(rsc_mjar_file))

                self._record_target_stats(tgt, len(metacp_inputs),
                                          len(target_sources), timer.elapsed,
                                          False, 'rsc')
                # Write any additional resources for this target to the target workdir.
                self.write_extra_resources(ctx)

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
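The repeated _paths_from_classpath calls in this example extract bare paths from classpath product entries. A plausible minimal form, assuming each entry is a (conf, path) pair as the y[1] indexing in the later examples suggests (the real helper lives in Pants' rsc compile module):

def _paths_from_classpath(classpath_tuples, collection_type=list):
    # Keep only the path half of each (conf, path) entry, materialized into
    # whatever collection the caller asks for (list, set, OrderedSet, ...).
    return collection_type(path for _conf, path in classpath_tuples)

# _paths_from_classpath([('default', 'a.jar'), ('default', 'b.jar')],
#                       collection_type=set) -> {'a.jar', 'b.jar'}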
Example no. 28
        def work_for_vts_rsc(vts, ctx):
            target = ctx.target
            tgt, = vts.targets

            rsc_cc = compile_contexts[target].rsc_cc

            use_youtline = rsc_cc.workflow == self.JvmCompileWorkflowType.outline_and_zinc
            outliner = 'scalac-outliner' if use_youtline else 'rsc'

            if use_youtline and Semver.parse(
                    self._scala_library_version) < Semver.parse("2.12.9"):
                raise RuntimeError(
                    f"To use scalac's built-in outlining, scala version must be at least 2.12.9, but got {self._scala_library_version}"
                )

            # If we didn't hit the cache in the cache job, run rsc.
            if not vts.valid:
                counter_val = str(counter()).rjust(counter.format_length(),
                                                   ' ')
                counter_str = '[{}/{}] '.format(counter_val, counter.size)
                action_str = 'Outlining ' if use_youtline else 'Rsc-ing '

                self.context.log.info(
                    counter_str, action_str,
                    items_to_report_element(ctx.sources,
                                            '{} source'.format(self.name())),
                    ' in ',
                    items_to_report_element(
                        [t.address.reference() for t in vts.targets],
                        'target'), ' (', ctx.target.address.spec, ').')
                # This does the following:
                # - Collect the rsc classpath elements, including zinc compiles of rsc incompatible targets
                #   and rsc compiles of rsc compatible targets.
                # - Run Rsc on the current target with those as dependencies.

                dependencies_for_target = list(
                    DependencyContext.global_instance()
                    .dependencies_respecting_strict_deps(target))

                classpath_paths = []
                classpath_directory_digests = []
                classpath_product = self.context.products.get_data(
                    'rsc_mixed_compile_classpath')
                classpath_entries = classpath_product.get_classpath_entries_for_targets(
                    dependencies_for_target)
                for _conf, classpath_entry in classpath_entries:
                    classpath_paths.append(
                        fast_relpath(classpath_entry.path, get_buildroot()))
                    if self.execution_strategy == self.ExecutionStrategy.hermetic and not classpath_entry.directory_digest:
                        raise AssertionError(
                            "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic "
                            "execution of {}".format(classpath_entry,
                                                     outliner))
                    classpath_directory_digests.append(
                        classpath_entry.directory_digest)

                ctx.ensure_output_dirs_exist()

                with Timer() as timer:
                    # Outline Scala sources into SemanticDB / scalac compatible header jars.
                    # ---------------------------------------------
                    rsc_jar_file_relative_path = fast_relpath(
                        ctx.rsc_jar_file.path, get_buildroot())

                    sources_snapshot = ctx.target.sources_snapshot(
                        scheduler=self.context._scheduler)

                    distribution = self._get_jvm_distribution()

                    def hermetic_digest_classpath():
                        jdk_libs_rel, jdk_libs_digest = self._jdk_libs_paths_and_digest(
                            distribution)

                        merged_sources_and_jdk_digest = self.context._scheduler.merge_directories(
                            (jdk_libs_digest,
                             sources_snapshot.directory_digest) +
                            tuple(classpath_directory_digests))
                        classpath_rel_jdk = classpath_paths + jdk_libs_rel
                        return (merged_sources_and_jdk_digest,
                                classpath_rel_jdk)

                    def nonhermetic_digest_classpath():
                        classpath_abs_jdk = classpath_paths + self._jdk_libs_abs(
                            distribution)
                        return (EMPTY_DIRECTORY_DIGEST, classpath_abs_jdk)

                    (input_digest,
                     classpath_entry_paths) = self.execution_strategy.match({
                         self.ExecutionStrategy.hermetic:
                         hermetic_digest_classpath,
                         self.ExecutionStrategy.subprocess:
                         nonhermetic_digest_classpath,
                         self.ExecutionStrategy.nailgun:
                         nonhermetic_digest_classpath,
                     })()

                    youtline_args = []
                    if use_youtline:
                        youtline_args = [
                            "-Youtline",
                            "-Ystop-after:pickler",
                            "-Ypickle-write",
                            rsc_jar_file_relative_path,
                        ]
                        if not self.get_options().allow_public_inference:
                            wartremover_args = [
                                f"-Xplugin:{self._wartremover_classpath[0]}",
                                "-P:wartremover:traverser:org.wartremover.warts.PublicInference",
                                "-Ycache-plugin-class-loader:last-modified",
                            ]
                            youtline_args = wartremover_args + youtline_args

                    target_sources = ctx.sources
                    args = [
                        '-cp',
                        os.pathsep.join(classpath_entry_paths),
                        '-d',
                        rsc_jar_file_relative_path,
                    ] + self.get_options().extra_rsc_args + youtline_args + target_sources

                    self.write_argsfile(ctx, args)

                    self._runtool(distribution, input_digest, ctx,
                                  use_youtline)

                self._record_target_stats(tgt, len(classpath_entry_paths),
                                          len(target_sources), timer.elapsed,
                                          False, outliner)

            # Update the products with the latest classes.
            self.register_extra_products_from_contexts([ctx.target],
                                                       compile_contexts)
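The version gate at the top of this example can be reproduced without the Semver helper by comparing numeric tuples; comparing the raw strings would wrongly order '2.12.10' before '2.12.9'. A standalone sketch:

def require_outlining_support(scala_library_version):
    # Numeric tuple comparison, so '2.12.10' correctly sorts after '2.12.9'.
    parsed = tuple(int(p) for p in scala_library_version.split(".")[:3])
    if parsed < (2, 12, 9):
        raise RuntimeError(
            "To use scalac's built-in outlining, scala version must be at "
            "least 2.12.9, but got {}".format(scala_library_version))

# require_outlining_support("2.12.8")  -> RuntimeError
# require_outlining_support("2.12.10") -> ok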
Example no. 29
 def _is_strict_deps(target: Target) -> bool:
     return isinstance(
         target, JvmTarget
     ) and DependencyContext.global_instance().defaulted_property(target, "strict_deps")
Example no. 30
 def _collect_invalid_compile_dependencies(self, compile_target, invalid_target_set):
   all_strict_deps = DependencyContext.global_instance().dependencies_respecting_strict_deps(compile_target)
   return list(set(invalid_target_set) & set(all_strict_deps) - set([compile_target]))
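One subtlety in the expression above: in Python, `-` binds tighter than `&`, so it parses as set(invalid_target_set) & (set(all_strict_deps) - {compile_target}); for sets either grouping yields the same result. An explicit, self-contained equivalent:

def collect_invalid_compile_dependencies(compile_target, invalid_targets,
                                         all_strict_deps):
    # Invalid targets that are strict deps of compile_target, excluding the
    # target itself.
    return list((set(invalid_targets) & set(all_strict_deps))
                - {compile_target})

# collect_invalid_compile_dependencies('t', {'a', 't'}, {'a', 'b', 't'})
# -> ['a']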
Example no. 31
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target

      if not hit_cache:
        cp_entries = []

        # Include the current machine's jdk lib jars. This'll blow up remotely.
        # We need a solution for that.
        # Probably something to do with https://github.com/pantsbuild/pants/pull/6346
        distribution = JvmPlatform.preferred_jvm_distribution([ctx.target.platform], strict=True)
        jvm_lib_jars_abs = distribution.find_libs(['rt.jar', 'dt.jar', 'jce.jar', 'tools.jar'])
        cp_entries.extend(jvm_lib_jars_abs)

        classpath_abs = self._zinc.compile_classpath(
          'rsc_classpath',
          ctx.target,
          extra_cp_entries=self._extra_compile_time_classpath)

        jar_deps = [t for t in DependencyContext.global_instance().dependencies_respecting_strict_deps(target)
                    if isinstance(t, JarLibrary)]
        metacp_jar_classpath_abs = [y[1] for y in self._metacp_jars_classpath_product.get_for_targets(
          jar_deps
        )]
        jar_jar_paths = {y[1] for y in self.context.products.get_data('rsc_classpath').get_for_targets(jar_deps)}

        classpath_abs = [c for c in classpath_abs if c not in jar_jar_paths]


        classpath_rel = fast_relpath_collection(classpath_abs)
        metacp_jar_classpath_rel = fast_relpath_collection(metacp_jar_classpath_abs)
        cp_entries.extend(classpath_rel)

        ctx.ensure_output_dirs_exist()

        counter_val = str(counter()).rjust(counter.format_length(), ' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        tgt, = vts.targets
        with Timer() as timer:
          # Step 1: Convert classpath to SemanticDB
          # ---------------------------------------
          scalac_classpath_path_entries_abs = self.tool_classpath('workaround-metacp-dependency-classpath')
          scalac_classpath_path_entries = fast_relpath_collection(scalac_classpath_path_entries_abs)
          rsc_index_dir = fast_relpath(ctx.rsc_index_dir, get_buildroot())
          args = [
            '--verbose',
            # NB: Without this setting, rsc will be missing some symbols
            #     from the scala library.
            '--include-scala-library-synthetics', # TODO generate these once and cache them
            # NB: We need to add these extra dependencies in order to be able
            #     to find symbols used by the scalac jars.
            '--dependency-classpath', os.pathsep.join(scalac_classpath_path_entries + list(jar_jar_paths)),
            # NB: The directory to dump the semanticdb jars generated by metacp.
            '--out', rsc_index_dir,
            os.pathsep.join(cp_entries),
          ]
          metacp_wu = self._runtool(
            'scala.meta.cli.Metacp',
            'metacp',
            args,
            distribution,
            tgt=tgt,
            input_files=(scalac_classpath_path_entries + classpath_rel),
            output_dir=rsc_index_dir)
          metacp_stdout = stdout_contents(metacp_wu)
          metacp_result = json.loads(metacp_stdout)


          metai_classpath = self._collect_metai_classpath(
            metacp_result, classpath_rel, jvm_lib_jars_abs)

          # Step 1.5: metai Index the semanticdbs
          # -------------------------------------
          self._run_metai_tool(distribution, metai_classpath, rsc_index_dir, tgt)

          # Step 2: Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_outline_dir = fast_relpath(ctx.rsc_outline_dir, get_buildroot())
          rsc_out = os.path.join(rsc_outline_dir, 'META-INF/semanticdb/out.semanticdb')
          safe_mkdir(os.path.join(rsc_outline_dir, 'META-INF/semanticdb'))
          target_sources = ctx.sources
          args = [
            '-cp', os.pathsep.join(metai_classpath + metacp_jar_classpath_rel),
            '-out', rsc_out,
          ] + target_sources
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            # TODO pass the input files from the target snapshot instead of the below
            # input_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
            input_files=target_sources + metai_classpath + metacp_jar_classpath_rel,
            output_dir=rsc_outline_dir)
          rsc_classpath = [rsc_outline_dir]

          # Step 2.5: Postprocess the rsc outputs
          # TODO: This is only necessary as a workaround for https://github.com/twitter/rsc/issues/199.
          # Ideally, Rsc would do this on its own.
          self._run_metai_tool(distribution,
            rsc_classpath,
            rsc_outline_dir,
            tgt,
            extra_input_files=(rsc_out,))


          # Step 3: Convert SemanticDB into an mjar
          # ---------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())
          args = [
            '-out', rsc_mjar_file,
            os.pathsep.join(rsc_classpath),
          ]
          self._runtool(
            'scala.meta.cli.Mjar',
            'mjar',
            args,
            distribution,
            tgt=tgt,
            input_files=(
              rsc_out,
            ),
            output_dir=os.path.dirname(rsc_mjar_file))
          self.context.products.get_data('rsc_classpath').add_for_target(
            ctx.target,
            [(conf, ctx.rsc_mjar_file) for conf in self._confs],
          )

        self._record_target_stats(tgt,
                                  len(cp_entries),
                                  len(target_sources),
                                  timer.elapsed,
                                  False,
                                  'rsc'
                                  )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
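fast_relpath and fast_relpath_collection, used throughout this example, rebase buildroot-absolute paths to buildroot-relative ones so the tool invocations stay machine-independent. A rough sketch with an illustrative root in place of get_buildroot() (the real Pants helper asserts each path lives under the root rather than emitting '..' segments):

import os

def fast_relpath_collection(paths, root="/repo/buildroot"):
    # Illustrative default root; the real helper uses get_buildroot() and
    # rejects paths outside it instead of producing '..' components.
    return [os.path.relpath(p, root) for p in paths]

# fast_relpath_collection(["/repo/buildroot/out/a.jar"]) -> ["out/a.jar"]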
Example no. 32
    def work_for_vts_rsc(vts, ctx):
      # Double check the cache before beginning compilation
      hit_cache = self.check_cache(vts, counter)
      target = ctx.target
      tgt, = vts.targets

      if not hit_cache:
        counter_val = str(counter()).rjust(counter.format_length(), ' ' if PY3 else b' ')
        counter_str = '[{}/{}] '.format(counter_val, counter.size)
        self.context.log.info(
          counter_str,
          'Rsc-ing ',
          items_to_report_element(ctx.sources, '{} source'.format(self.name())),
          ' in ',
          items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
          ' (',
          ctx.target.address.spec,
          ').')

        # This does the following:
        # - collect jar dependencies and metacp-classpath entries for them
        # - collect the non-java targets and their classpath entries
        # - break out java targets and their javac'd classpath entries
        # metacp
        # - metacp the java targets
        # rsc
        # - combine the metacp outputs for jars, previous scala targets and the java metacp
        #   classpath
        # - run Rsc on the current target with those as dependencies

        dependencies_for_target = list(
          DependencyContext.global_instance().dependencies_respecting_strict_deps(target))

        jar_deps = [t for t in dependencies_for_target if isinstance(t, JarLibrary)]

        def is_java_compile_target(t):
          return isinstance(t, JavaLibrary) or t.has_sources('.java')
        java_deps = [t for t in dependencies_for_target
                     if is_java_compile_target(t)]
        non_java_deps = [t for t in dependencies_for_target
                         if not (is_java_compile_target(t)) and not isinstance(t, JarLibrary)]

        metacped_jar_classpath_abs = _paths_from_classpath(
          self._metacp_jars_classpath_product.get_for_targets(jar_deps + java_deps)
        )
        metacped_jar_classpath_abs.extend(self._jvm_lib_metacp_classpath)
        metacped_jar_classpath_rel = fast_relpath_collection(metacped_jar_classpath_abs)

        non_java_paths = _paths_from_classpath(
          self.context.products.get_data('rsc_classpath').get_for_targets(non_java_deps),
          collection_type=set)
        non_java_rel = fast_relpath_collection(non_java_paths)

        ctx.ensure_output_dirs_exist()

        distribution = self._get_jvm_distribution()
        with Timer() as timer:
          # Outline Scala sources into SemanticDB
          # ---------------------------------------------
          rsc_mjar_file = fast_relpath(ctx.rsc_mjar_file, get_buildroot())

          # TODO remove non-rsc entries from non_java_rel in a better way
          rsc_semanticdb_classpath = metacped_jar_classpath_rel + \
                                     [j for j in non_java_rel if 'compile/rsc/' in j]
          target_sources = ctx.sources
          args = [
                   '-cp', os.pathsep.join(rsc_semanticdb_classpath),
                   '-d', rsc_mjar_file,
                 ] + target_sources
          sources_snapshot = ctx.target.sources_snapshot(scheduler=self.context._scheduler)
          self._runtool(
            'rsc.cli.Main',
            'rsc',
            args,
            distribution,
            tgt=tgt,
            input_files=tuple(rsc_semanticdb_classpath),
            input_digest=sources_snapshot.directory_digest,
            output_dir=os.path.dirname(rsc_mjar_file))

        self._record_target_stats(tgt,
          len(rsc_semanticdb_classpath),
          len(target_sources),
          timer.elapsed,
          False,
          'rsc'
        )
        # Write any additional resources for this target to the target workdir.
        self.write_extra_resources(ctx)

      # Update the products with the latest classes.
      self.register_extra_products_from_contexts([ctx.target], compile_contexts)
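Finally, a minimal sketch of the progress counter behind the '[ 3/12] Rsc-ing ...' log lines repeated across these examples (the real Counter lives in Pants' compile machinery; this stand-in only shows the formatting contract):

class Counter:
    def __init__(self, size):
        self.size = size
        self._count = 0

    def __call__(self):
        # Each call claims the next slot in the progress display.
        self._count += 1
        return self._count

    def format_length(self):
        # Width needed so counts right-align under the total.
        return len(str(self.size))

counter = Counter(12)
counter_val = str(counter()).rjust(counter.format_length(), ' ')
counter_str = '[{}/{}] '.format(counter_val, counter.size)  # '[ 1/12] '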