def test_fails_on_paths_outside_buildroot(self):
  """An absolute classpath entry outside the buildroot should raise a TaskError."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  products.add_for_target(target_a, [('default', '/dev/null')])
  with self.assertRaises(TaskError) as cm:
    ClasspathUtil.compute_classpath([target_a], products, [], ['default'])
  expected = 'Classpath entry /dev/null for target a:a is located outside the buildroot.'
  self.assertEqual(str(expected), str(cm.exception))
def test_fails_on_paths_outside_buildroot(self):
  """compute_classpath rejects product entries that live outside the buildroot."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = UnionProducts()
  classpath_product.add_for_target(jvm_target, [('default', '/dev/null')])
  with self.assertRaises(TaskError) as raised:
    ClasspathUtil.compute_classpath([jvm_target], classpath_product, [], ['default'])
  message = 'Classpath entry /dev/null for target a:a is located outside the buildroot.'
  self.assertEqual(str(message), str(raised.exception))
def _compute_classpath_entries(self, classpath_products, compile_context, extra_compile_time_classpath):
  """Build a classpath specific to this compile and target.

  :param classpath_products: The classpath products to resolve entries from.
  :param compile_context: The context for the target being compiled.
  :param extra_compile_time_classpath: Extra (conf, path) tuples to append.
  :returns: A list of classpath entries, starting with the target's own classes dir.
  """
  target = compile_context.target
  if compile_context.strict_deps:
    deps = list(self._compute_strict_dependencies(target))
    full_closure = target.closure(bfs=True, **self._target_closure_kwargs)
    pruned_specs = [t.address.spec for t in full_closure if t not in deps]
    self.context.log.debug(
      'Using strict classpath for {}, which prunes the following dependencies: {}'.format(
        target.address.spec, pruned_specs))
  else:
    deps = target.closure(bfs=True, **self._target_closure_kwargs)
  # The target itself must not appear on its own compile classpath.
  deps = (t for t in deps if t != target)
  entries = [compile_context.classes_dir]
  entries.extend(ClasspathUtil.compute_classpath(deps, classpath_products,
                                                 extra_compile_time_classpath, self._confs))
  if isinstance(target, JavacPlugin):
    # Javac plugins need to compile against our distribution's tools.jar. There's no way to
    # express this via traversable_dependency_specs, so we inject it into the classpath here.
    entries = self.dist.find_libs(['tools.jar']) + entries
  return entries
def _compute_classpath_entries(self, classpath_products, compile_context, extra_compile_time_classpath):
  """Generate a classpath specific to this compile and target.

  Returns the target's classes dir followed by its dependencies' classpath entries,
  optionally prefixed with tools.jar for javac plugins.
  """
  target = compile_context.target
  if not compile_context.strict_deps:
    relevant = target.closure(bfs=True, **self._target_closure_kwargs)
  else:
    relevant = list(self._compute_strict_dependencies(target))
    pruned = [t.address.spec
              for t in target.closure(bfs=True, **self._target_closure_kwargs)
              if t not in relevant]
    self.context.log.debug(
      'Using strict classpath for {}, which prunes the following dependencies: {}'.format(
        target.address.spec, pruned))
  # Exclude the target itself from its own compile classpath.
  dependency_targets = (t for t in relevant if t != target)
  classpath_entries = [compile_context.classes_dir]
  classpath_entries.extend(
    ClasspathUtil.compute_classpath(dependency_targets, classpath_products,
                                    extra_compile_time_classpath, self._confs))
  if isinstance(target, JavacPlugin):
    # Javac plugins need to compile against our distribution's tools.jar. There's no way to
    # express this via traversable_dependency_specs, so we inject it into the classpath here.
    classpath_entries = self.dist.find_libs(['tools.jar']) + classpath_entries
  return classpath_entries
def compile_classpath(self, classpath_product_key, target, extra_cp_entries=None):
  """Compute the compile classpath for the given target.

  :param classpath_product_key: Products key under which the classpath product is registered.
  :param target: The target to compute a classpath for.
  :param extra_cp_entries: Optional extra classpath entries to append.
  """
  classpath_product = self._products.get_data(classpath_product_key)
  use_strict_deps = DependencyContext.global_instance().defaulted_property(
    target, lambda x: x.strict_deps)
  if use_strict_deps:
    dependencies = target.strict_dependencies(DependencyContext.global_instance())
  else:
    dependencies = DependencyContext.global_instance().all_dependencies(target)
  extra_entries = list(self._compiler_plugins_cp_entries())
  if extra_cp_entries:
    extra_entries.extend(extra_cp_entries)
  # TODO: We convert dependencies to an iterator here in order to _preserve_ a bug that will be
  # fixed in https://github.com/pantsbuild/pants/issues/4874: `ClasspathUtil.compute_classpath`
  # expects to receive a list, but had been receiving an iterator. In the context of an
  # iterator, `excludes` are not applied
  # in ClasspathProducts.get_product_target_mappings_for_targets.
  return ClasspathUtil.compute_classpath(iter(dependencies), classpath_product,
                                         extra_entries, self.DEFAULT_CONFS)
def test_single_classpath_element_no_excludes(self):
  """A single registered entry under a matching conf is returned verbatim."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([target_a], products, [], ['default'])
  self.assertEqual([jar_path], result)
def work_for_vts(vts, ctx): progress_message = ctx.target.address.spec # Capture a compilation log if requested. log_file = ctx.log_file if self._capture_log else None # Double check the cache before beginning compilation hit_cache = check_cache(vts) if not hit_cache: # Compute the compile classpath for this target. cp_entries = [compile_context.classes_dir] cp_entries.extend(ClasspathUtil.compute_classpath(ctx.dependencies(self._dep_context), classpath_products, extra_compile_time_classpath, self._confs)) # TODO: always provide transitive analysis, but not always all classpath entries? upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) # Write analysis to a temporary file, and move it to the final location on success. tmp_analysis_file = "{}.tmp".format(ctx.analysis_file) if should_compile_incrementally(vts): # If this is an incremental compile, rebase the analysis to our new classes directory. self._analysis_tools.rebase_from_path(ctx.analysis_file, tmp_analysis_file, vts.previous_results_dir, vts.results_dir) else: # Otherwise, simply ensure that it is empty. safe_delete(tmp_analysis_file) tgt, = vts.targets fatal_warnings = self._compute_language_property(tgt, lambda x: x.fatal_warnings) self._compile_vts(vts, ctx.sources, tmp_analysis_file, upstream_analysis, cp_entries, ctx.classes_dir, log_file, progress_message, tgt.platform, fatal_warnings, counter) os.rename(tmp_analysis_file, ctx.analysis_file) self._analysis_tools.relativize(ctx.analysis_file, ctx.portable_analysis_file) # Write any additional resources for this target to the target workdir. self.write_extra_resources(ctx) # Jar the compiled output. self._create_context_jar(ctx) # Update the products with the latest classes. self._register_vts([ctx]) # Once products are registered, check for unused dependencies (if enabled). if not hit_cache and self._unused_deps_check_enabled: self._check_unused_deps(ctx)
def test_relies_on_product_to_validate_paths_outside_buildroot(self):
  """compute_classpath itself performs no buildroot validation of product paths."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  products.add_for_target(target_a, [('default', '/dev/null')])
  result = ClasspathUtil.compute_classpath([target_a], products, [], ['default'])
  self.assertEqual(['/dev/null'], result)
def test_single_classpath_element_no_excludes(self):
  """With no excludes, the lone product entry comes back unchanged."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = UnionProducts()
  entry = os.path.join(self.build_root, 'jar/path')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product, [], ['default'])
  self.assertEqual([entry], computed)
def work_for_vts(vts, ctx): progress_message = ctx.target.address.spec # Capture a compilation log if requested. log_file = ctx.log_file if self._capture_log else None # Double check the cache before beginning compilation hit_cache = check_cache(vts) if not hit_cache: # Compute the compile classpath for this target. cp_entries = [ctx.classes_dir] cp_entries.extend( ClasspathUtil.compute_classpath( ctx.dependencies(self._dep_context), classpath_products, extra_compile_time_classpath, self._confs, ) ) upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) if not should_compile_incrementally(vts, ctx): # Purge existing analysis file in non-incremental mode. safe_delete(ctx.analysis_file) # Work around https://github.com/pantsbuild/pants/issues/3670 safe_rmtree(ctx.classes_dir) tgt, = vts.targets fatal_warnings = self._compute_language_property(tgt, lambda x: x.fatal_warnings) self._compile_vts( vts, ctx.sources, ctx.analysis_file, upstream_analysis, cp_entries, ctx.classes_dir, log_file, progress_message, tgt.platform, fatal_warnings, counter, ) self._analysis_tools.relativize(ctx.analysis_file, ctx.portable_analysis_file) # Write any additional resources for this target to the target workdir. self.write_extra_resources(ctx) # Jar the compiled output. self._create_context_jar(ctx) # Update the products with the latest classes. self._register_vts([ctx]) # Once products are registered, check for unused dependencies (if enabled). if not hit_cache and self._unused_deps_check_enabled: self._check_unused_deps(ctx)
def test_excluded_classpath_element(self):
  """A jar matching the target's own exclude is filtered from its classpath."""
  target_a = self.make_target('a', JvmTarget, excludes=[Exclude('com.example', 'lib')])
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  products.add_for_target(target_a, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([target_a], products, [], ['default'])
  self.assertEqual([], result)
def test_relies_on_product_to_validate_paths_outside_buildroot(self):
  """Path validation is the product's job; compute_classpath passes entries through."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = UnionProducts()
  classpath_product.add_for_target(jvm_target, [('default', '/dev/null')])
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product, [], ['default'])
  self.assertEqual(['/dev/null'], computed)
def work_for_vts(vts, ctx): progress_message = ctx.target.address.spec # Capture a compilation log if requested. log_file = ctx.log_file if self._capture_log else None # Double check the cache before beginning compilation hit_cache = check_cache(vts) if not hit_cache: # Compute the compile classpath for this target. cp_entries = [ctx.classes_dir] cp_entries.extend(ClasspathUtil.compute_classpath(ctx.dependencies(self._dep_context), classpath_products, extra_compile_time_classpath, self._confs)) upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) if not should_compile_incrementally(vts, ctx): # Purge existing analysis file in non-incremental mode. safe_delete(ctx.analysis_file) # Work around https://github.com/pantsbuild/pants/issues/3670 safe_rmtree(ctx.classes_dir) tgt, = vts.targets fatal_warnings = self._compute_language_property(tgt, lambda x: x.fatal_warnings) zinc_file_manager = self._compute_language_property(tgt, lambda x: x.zinc_file_manager) self._compile_vts(vts, ctx.target, ctx.sources, ctx.analysis_file, upstream_analysis, cp_entries, ctx.classes_dir, log_file, ctx.zinc_args_file, progress_message, tgt.platform, fatal_warnings, zinc_file_manager, counter) self._analysis_tools.relativize(ctx.analysis_file, ctx.portable_analysis_file) # Write any additional resources for this target to the target workdir. self.write_extra_resources(ctx) # Jar the compiled output. self._create_context_jar(ctx) # Update the products with the latest classes. self._register_vts([ctx]) # Once products are registered, check for unused dependencies (if enabled). if not hit_cache and self._unused_deps_check_enabled: self._check_unused_deps(ctx)
def test_path_with_differing_conf_ignored(self):
  """Entries registered under a conf not in the requested confs are filtered out."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([target_a], products, [], ['not-default'])
  self.assertEqual([], result)
def test_parent_excludes_ignored_for_resolving_child_target(self):
  """An exclude on a dependee must not filter entries when resolving only the child."""
  child = self.make_target('b', JvmTarget)
  self.make_target('a', JvmTarget, dependencies=[child],
                   excludes=[Exclude('com.example', 'lib')])
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  products.add_for_target(child, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([child], products, [], ['default'])
  self.assertEqual([jar_path], result)
def test_path_with_overlapped_conf_added(self):
  """An entry is included when any requested conf matches its registered conf."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([target_a], products, [],
                                           ['not-default', 'default'])
  self.assertEqual([jar_path], result)
def test_exclude_leaves_other_jars_unaffected(self):
  """A dependency's exclude filters only matching coordinates; other jars survive."""
  dep = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
  root = self.make_target('a', JvmTarget, dependencies=[dep])
  products = UnionProducts()
  excluded_jar = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  kept_jar = os.path.join(self.build_root, 'ivy/jars/org.example/lib/123.4.jar')
  products.add_for_target(root, [('default', excluded_jar), ('default', kept_jar)])
  result = ClasspathUtil.compute_classpath([root], products, [], ['default'])
  self.assertEqual([kept_jar], result)
def test_extra_path_added(self):
  """Extra classpath tuples with a matching conf are appended after target entries."""
  target_a = self.make_target('a', JvmTarget)
  products = UnionProducts()
  jar_path = os.path.join(self.build_root, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  additional = 'new-path'
  result = ClasspathUtil.compute_classpath([target_a], products,
                                           [('default', additional)], ['default'])
  self.assertEqual([jar_path, additional], result)
def test_extra_path_added(self):
  """An extra (conf, path) tuple lands at the end of the computed classpath."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = UnionProducts()
  entry = os.path.join(self.build_root, 'jar/path')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  appended = 'new-path'
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product,
                                             [('default', appended)], ['default'])
  self.assertEqual([entry, appended], computed)
def test_path_with_overlapped_conf_added(self):
  """Requesting multiple confs includes entries whose conf overlaps any of them."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = UnionProducts()
  entry = os.path.join(self.build_root, 'jar/path')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product,
                                             extra_classpath_tuples=[],
                                             confs=['not-default', 'default'])
  self.assertEqual([entry], computed)
def test_path_with_differing_conf_ignored(self):
  """Entries registered under a non-requested conf are filtered out."""
  target_a = self.make_target('a', JvmTarget)
  products = ClasspathProducts(self.pants_workdir)
  jar_path = os.path.join(self.pants_workdir, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  result = ClasspathUtil.compute_classpath([target_a], products,
                                           extra_classpath_tuples=[],
                                           confs=['not-default'])
  self.assertEqual([], result)
def test_path_with_differing_conf_ignored(self):
  """A conf mismatch yields an empty classpath."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = ClasspathProducts(self.pants_workdir)
  entry = os.path.join(self.pants_workdir, 'jar/path')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product,
                                             extra_classpath_tuples=[],
                                             confs=['not-default'])
  self.assertEqual([], computed)
def test_excluded_classpath_element(self):
  """An exclude on the target itself removes the matching jar from the result."""
  jvm_target = self.make_target('a', JvmTarget, excludes=[Exclude('com.example', 'lib')])
  classpath_product = UnionProducts()
  entry = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  computed = ClasspathUtil.compute_classpath([jvm_target], classpath_product, [], ['default'])
  self.assertEqual([], computed)
def test_path_with_overlapped_conf_added(self):
  """An entry is kept when its conf overlaps the requested confs."""
  target_a = self.make_target("a", JvmTarget)
  products = ClasspathProducts(self.pants_workdir)
  jar_path = os.path.join(self.pants_workdir, "jar/path")
  products.add_for_target(target_a, [("default", jar_path)])
  result = ClasspathUtil.compute_classpath(
    [target_a], products, extra_classpath_tuples=[], confs=["not-default", "default"])
  self.assertEqual([jar_path], result)
def test_extra_path_added(self):
  """Extra classpath tuples are appended after the target's own entries."""
  target_a = self.make_target('a', JvmTarget)
  products = ClasspathProducts(self.pants_workdir)
  jar_path = os.path.join(self.pants_workdir, 'jar/path')
  products.add_for_target(target_a, [('default', jar_path)])
  additional = 'new-path'
  extras = [('default', additional)]
  result = ClasspathUtil.compute_classpath([target_a], products,
                                           extra_classpath_tuples=extras,
                                           confs=['default'])
  self.assertEqual([jar_path, additional], result)
def _compute_classpath_entries(self, classpath_products, compile_context, extra_compile_time_classpath):
  """Generate a classpath specific to this compile and target.

  Uses the strict-deps subset of the closure when the compile context requests it,
  logging which dependencies were pruned.
  """
  target = compile_context.target
  if compile_context.strict_deps:
    relevant_targets = list(self._compute_strict_dependencies(target))
    pruned_specs = [t.address.spec
                    for t in target.closure(bfs=True)
                    if t not in relevant_targets]
    self.context.log.debug(
      'Using strict classpath for {}, which prunes the following dependencies: {}'.format(
        target.address.spec, pruned_specs))
  else:
    relevant_targets = target.closure(bfs=True)
  return ClasspathUtil.compute_classpath(relevant_targets, classpath_products,
                                         extra_compile_time_classpath, self._confs)
def test_parent_excludes_ignored_for_resolving_child_target(self):
  """Resolving the child alone must ignore excludes declared on its parent."""
  child_target = self.make_target('b', JvmTarget)
  self.make_target('a', JvmTarget, dependencies=[child_target],
                   excludes=[Exclude('com.example', 'lib')])
  classpath_product = UnionProducts()
  entry = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  classpath_product.add_for_target(child_target, [('default', entry)])
  computed = ClasspathUtil.compute_classpath([child_target], classpath_product, [], ['default'])
  self.assertEqual([entry], computed)
def test_extra_path_added(self):
  """Keyword-style invocation: extra tuples still land at the end of the result."""
  jvm_target = self.make_target('a', JvmTarget)
  classpath_product = ClasspathProducts(self.pants_workdir)
  entry = os.path.join(self.pants_workdir, 'jar/path')
  classpath_product.add_for_target(jvm_target, [('default', entry)])
  appended = 'new-path'
  computed = ClasspathUtil.compute_classpath(
    [jvm_target], classpath_product,
    extra_classpath_tuples=[('default', appended)], confs=['default'])
  self.assertEqual([entry, appended], computed)
def test_extra_path_added(self):
  """Extra classpath tuples are appended after product-registered entries."""
  target_a = self.make_target("a", JvmTarget)
  products = ClasspathProducts(self.pants_workdir)
  jar_path = os.path.join(self.pants_workdir, "jar/path")
  products.add_for_target(target_a, [("default", jar_path)])
  additional = "new-path"
  result = ClasspathUtil.compute_classpath(
    [target_a], products,
    extra_classpath_tuples=[("default", additional)], confs=["default"])
  self.assertEqual([jar_path, additional], result)
def _compute_classpath_entries(self, classpath_products, compile_context, extra_compile_time_classpath):
  """Generate a classpath specific to this compile and target."""
  target = compile_context.target
  if not compile_context.strict_deps:
    deps = target.closure(bfs=True)
  else:
    deps = list(self._compute_strict_dependencies(target))
    pruned = [t.address.spec for t in target.closure(bfs=True) if t not in deps]
    # Surface what strict-deps removed, to aid debugging missing-symbol errors.
    self.context.log.debug(
      'Using strict classpath for {}, which prunes the following dependencies: {}'.format(
        target.address.spec, pruned))
  return ClasspathUtil.compute_classpath(deps, classpath_products,
                                         extra_compile_time_classpath, self._confs)
def test_exclude_leaves_other_jars_unaffected(self):
  """Only coordinates matching the exclude are dropped; unrelated jars remain."""
  excluding_dep = self.make_target('b', JvmTarget, excludes=[Exclude('com.example', 'lib')])
  root = self.make_target('a', JvmTarget, dependencies=[excluding_dep])
  classpath_product = UnionProducts()
  excluded_entry = os.path.join(self.build_root, 'ivy/jars/com.example/lib/123.4.jar')
  surviving_entry = os.path.join(self.build_root, 'ivy/jars/org.example/lib/123.4.jar')
  classpath_product.add_for_target(root, [('default', excluded_entry),
                                          ('default', surviving_entry)])
  computed = ClasspathUtil.compute_classpath([root], classpath_product, [], ['default'])
  self.assertEqual([surviving_entry], computed)
def compile_classpath(self, classpath_product_key, target, extra_cp_entries=None):
  """Compute the compile classpath for the given target.

  :param classpath_product_key: Products key for the registered classpath product.
  :param target: The target whose classpath is being computed.
  :param extra_cp_entries: Optional additional classpath entries to include.
  """
  classpath_product = self._products.get_data(classpath_product_key)
  strict = DependencyContext.global_instance().defaulted_property(target,
                                                                  lambda x: x.strict_deps)
  if strict:
    deps = target.strict_dependencies(DependencyContext.global_instance())
  else:
    deps = DependencyContext.global_instance().all_dependencies(target)
  combined_extra_entries = list(self._compiler_plugins_cp_entries())
  if extra_cp_entries:
    combined_extra_entries.extend(extra_cp_entries)
  # TODO: We convert dependencies to an iterator here in order to _preserve_ a bug that will be
  # fixed in https://github.com/pantsbuild/pants/issues/4874: `ClasspathUtil.compute_classpath`
  # expects to receive a list, but had been receiving an iterator. In the context of an
  # iterator, `excludes` are not applied
  # in ClasspathProducts.get_product_target_mappings_for_targets.
  return ClasspathUtil.compute_classpath(iter(deps), classpath_product,
                                         combined_extra_entries, self.DEFAULT_CONFS)
def work_for_vts(vts, ctx): progress_message = ctx.target.address.spec # Capture a compilation log if requested. log_file = ctx.log_file if self._capture_log else None # Double check the cache before beginning compilation hit_cache = check_cache(vts) if not hit_cache: # Compute the compile classpath for this target. cp_entries = [ctx.classes_dir] # TODO: We convert to an iterator here in order to _preserve_ a bug that will be fixed # in https://github.com/pantsbuild/pants/issues/4874: `ClasspathUtil.compute_classpath` # expects to receive a list, but had been receiving an iterator. In the context of an # iterator, `excludes` are not applied # in ClasspathProducts.get_product_target_mappings_for_targets. dependencies_iter = iter(ctx.dependencies(self._dep_context)) cp_entries.extend(ClasspathUtil.compute_classpath(dependencies_iter, classpath_products, extra_compile_time_classpath, self._confs)) upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) is_incremental = should_compile_incrementally(vts, ctx) if not is_incremental: # Purge existing analysis file in non-incremental mode. safe_delete(ctx.analysis_file) # Work around https://github.com/pantsbuild/pants/issues/3670 safe_rmtree(ctx.classes_dir) tgt, = vts.targets fatal_warnings = self._compute_language_property(tgt, lambda x: x.fatal_warnings) zinc_file_manager = self._compute_language_property(tgt, lambda x: x.zinc_file_manager) with Timer() as timer: self._compile_vts(vts, ctx.target, ctx.sources, ctx.analysis_file, upstream_analysis, cp_entries, ctx.classes_dir, log_file, ctx.zinc_args_file, progress_message, tgt.platform, fatal_warnings, zinc_file_manager, counter) self._record_target_stats(tgt, len(cp_entries), len(ctx.sources), timer.elapsed, is_incremental) self._analysis_tools.relativize(ctx.analysis_file, ctx.portable_analysis_file) # Write any additional resources for this target to the target workdir. self.write_extra_resources(ctx) # Jar the compiled output. 
self._create_context_jar(ctx) # Update the products with the latest classes. self._register_vts([ctx]) # Once products are registered, check for unused dependencies (if enabled). if not hit_cache and self._unused_deps_check_enabled: self._check_unused_deps(ctx)
def compile_sub_chunk(self, invalidation_check, all_targets, invalid_targets, extra_compile_time_classpath_elements, compile_vts, register_vts, update_artifact_cache_vts_work, settings):
  """Executes compilations for the invalid targets contained in a single chunk.

  Has the side effects of populating:
  # valid/invalid analysis files
  # classes_by_source product
  # classes_by_target product
  # resources_by_target product
  """
  extra_classpath_tuples = self._compute_extra_classpath(extra_compile_time_classpath_elements)
  # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
  # NB: The global strategy uses the aggregated classpath (for all targets) to compile each
  # chunk, which avoids needing to introduce compile-time dependencies between annotation
  # processors and the classes they annotate.
  compile_classpath = ClasspathUtil.compute_classpath(all_targets,
                                                      self.context.products.get_data(
                                                        'compile_classpath'),
                                                      extra_classpath_tuples, self._confs)
  # Find the invalid sources for this chunk.
  invalid_sources_by_target = {t: self._sources_for_target(t) for t in invalid_targets}
  # Per-invocation scratch dir for partition analysis files.
  tmpdir = os.path.join(self.analysis_tmpdir, str(uuid.uuid4()))
  os.mkdir(tmpdir)
  # Figure out the sources and analysis belonging to each partition.
  partitions = []  # Each element is a triple (vts, sources_by_target, analysis).
  for vts in invalidation_check.invalid_vts_partitioned:
    partition_tmpdir = os.path.join(tmpdir, Target.maybe_readable_identify(vts.targets))
    os.mkdir(partition_tmpdir)
    sources = list(itertools.chain.from_iterable(
      [invalid_sources_by_target.get(t, []) for t in vts.targets]))
    # OrderedSet de-dupes while preserving first-seen order.
    de_duped_sources = list(OrderedSet(sources))
    if len(sources) != len(de_duped_sources):
      counts = [(src, len(list(srcs))) for src, srcs in itertools.groupby(sorted(sources))]
      self.context.log.warn(
        'De-duped the following sources:\n\t{}'
        .format('\n\t'.join(sorted('{} {}'.format(cnt, src)
                                   for src, cnt in counts if cnt > 1))))
    analysis_file = os.path.join(partition_tmpdir, 'analysis')
    partitions.append((vts, de_duped_sources, analysis_file))
  # Split per-partition files out of the global invalid analysis.
  if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file) and partitions:
    with self.context.new_workunit(name='partition-analysis'):
      splits = [(x[1], x[2]) for x in partitions]
      # We have to pass the analysis for any deleted files through zinc, to give it
      # a chance to delete the relevant class files.
      if splits:
        splits[0] = (splits[0][0] + self._deleted_sources, splits[0][1])
      self._analysis_tools.split_to_paths(self._invalid_analysis_file, splits)
  # Now compile partitions one by one.
  for partition_index, partition in enumerate(partitions):
    (vts, sources, analysis_file) = partition
    progress_message = 'partition {} of {}'.format(partition_index + 1, len(partitions))
    # We have to treat the global output dir as an upstream element, so compilers can
    # find valid analysis for previous partitions. We use the global valid analysis
    # for the upstream.
    upstream_analysis = ({self._classes_dir: self._analysis_file}
                         if os.path.exists(self._analysis_file) else {})
    compile_vts(vts, sources, analysis_file, upstream_analysis, compile_classpath,
                self._classes_dir, None, progress_message, settings)
    # No exception was thrown, therefore the compile succeeded and analysis_file is now valid.
    if os.path.exists(analysis_file):
      # The compilation created an analysis.
      # Merge the newly-valid analysis with our global valid analysis.
      new_valid_analysis = analysis_file + '.valid.new'
      if self._analysis_parser.is_nonempty_analysis(self._analysis_file):
        with self.context.new_workunit(name='update-upstream-analysis'):
          self._analysis_tools.merge_from_paths([self._analysis_file, analysis_file],
                                                new_valid_analysis)
      else:
        # We need to keep analysis_file around. Background tasks may need it.
        shutil.copy(analysis_file, new_valid_analysis)
      # Move the merged valid analysis to its proper location.
      # We do this before checking for missing dependencies, so that we can still
      # enjoy an incremental compile after fixing missing deps.
      self.move(new_valid_analysis, self._analysis_file)
    # Update the products with the latest classes. Must happen before the
    # missing dependencies check.
    register_vts([self.compile_context(t) for t in vts.targets])
    if self._dep_analyzer:
      # Check for missing dependencies.
      actual_deps = self._analysis_parser.parse_deps_from_path(
        analysis_file,
        lambda: self._compute_classpath_elements_by_class(compile_classpath),
        self._classes_dir)
      with self.context.new_workunit(name='find-missing-dependencies'):
        self._dep_analyzer.check(sources, actual_deps)
    # Kick off the background artifact cache write.
    if update_artifact_cache_vts_work:
      self._write_to_artifact_cache(analysis_file, vts, update_artifact_cache_vts_work)
    if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file):
      with self.context.new_workunit(name='trim-downstream-analysis'):
        # Trim out the newly-valid sources from our global invalid analysis.
        new_invalid_analysis = analysis_file + '.invalid.new'
        discarded_invalid_analysis = analysis_file + '.invalid.discard'
        self._analysis_tools.split_to_paths(self._invalid_analysis_file,
                                            [(sources, discarded_invalid_analysis)],
                                            new_invalid_analysis)
        self.move(new_invalid_analysis, self._invalid_analysis_file)
    # Record the built target -> sources mapping for future use.
    for target, sources in self._sources_for_targets(vts.targets).items():
      self._record_previous_sources_by_target(target, sources)
    # Now that all the analysis accounting is complete, and we have no missing deps,
    # we can safely mark the targets as valid.
    vts.update()
def work_for_vts(vts, ctx): progress_message = ctx.target.address.spec # Capture a compilation log if requested. log_file = ctx.log_file if self._capture_log else None # Double check the cache before beginning compilation hit_cache = check_cache(vts) if not hit_cache: # Compute the compile classpath for this target. cp_entries = [ctx.classes_dir] # TODO: We convert to an iterator here in order to _preserve_ a bug that will be fixed # in https://github.com/pantsbuild/pants/issues/4874: `ClasspathUtil.compute_classpath` # expects to receive a list, but had been receiving an iterator. In the context of an # iterator, `excludes` are not applied # in ClasspathProducts.get_product_target_mappings_for_targets. dependencies_iter = iter(ctx.dependencies(self._dep_context)) cp_entries.extend(ClasspathUtil.compute_classpath(dependencies_iter, classpath_products, extra_compile_time_classpath, self._confs)) upstream_analysis = dict(self._upstream_analysis(compile_contexts, cp_entries)) is_incremental = should_compile_incrementally(vts, ctx) if not is_incremental: # Purge existing analysis file in non-incremental mode. safe_delete(ctx.analysis_file) # Work around https://github.com/pantsbuild/pants/issues/3670 safe_rmtree(ctx.classes_dir) tgt, = vts.targets fatal_warnings = self._compute_language_property(tgt, lambda x: x.fatal_warnings) zinc_file_manager = self._compute_language_property(tgt, lambda x: x.zinc_file_manager) with Timer() as timer: self._compile_vts(vts, ctx.target, ctx.sources, ctx.analysis_file, upstream_analysis, cp_entries, ctx.classes_dir, log_file, ctx.zinc_args_file, progress_message, tgt.platform, fatal_warnings, zinc_file_manager, counter) self._record_target_stats(tgt, len(cp_entries), len(ctx.sources), timer.elapsed, is_incremental) self._analysis_tools.relativize(ctx.analysis_file, ctx.portable_analysis_file) # Write any additional resources for this target to the target workdir. self.write_extra_resources(ctx) # Jar the compiled output. 
self._create_context_jar(ctx) # Update the products with the latest classes. self._register_vts([ctx]) # Once products are registered, check for unused dependencies (if enabled). if not hit_cache and self._unused_deps_check_enabled: self._check_unused_deps(ctx)
def compile_chunk(self,
                  invalidation_check,
                  all_targets,
                  relevant_targets,
                  invalid_targets,
                  extra_compile_time_classpath_elements,
                  compile_vts,
                  register_vts,
                  update_artifact_cache_vts_work):
  """Executes compilations for the invalid targets contained in a single chunk.

  Has the side effects of populating:
  # valid/invalid analysis files
  # classes_by_source product
  # classes_by_target product
  # resources_by_target product

  :param invalidation_check: Supplies `invalid_vts_partitioned`, the invalid
    VersionedTargetSets grouped into compile partitions.
  :param all_targets: All targets in play; used to build the aggregated
    compile classpath (see NB below).
  :param relevant_targets: NOTE(review): unused in this body — presumably kept
    for interface compatibility with callers; confirm before removing.
  :param invalid_targets: The targets whose sources must be (re)compiled.
  :param extra_compile_time_classpath_elements: Extra classpath elements,
    normalized via `self._compute_extra_classpath`.
  :param compile_vts: Callback that runs the actual compile for one partition.
  :param register_vts: Callback that registers compiled products.
  :param update_artifact_cache_vts_work: If truthy, background work used to
    write results to the artifact cache.
  """
  assert invalid_targets, "compile_chunk should only be invoked if there are invalid targets."

  extra_classpath_tuples = self._compute_extra_classpath(extra_compile_time_classpath_elements)

  # Get the classpath generated by upstream JVM tasks and our own prepare_compile().
  # NB: The global strategy uses the aggregated classpath (for all targets) to compile each
  # chunk, which avoids needing to introduce compile-time dependencies between annotation
  # processors and the classes they annotate.
  compile_classpath = ClasspathUtil.compute_classpath(all_targets, self.context.products.get_data(
    'compile_classpath'), extra_classpath_tuples, self._confs)

  # Find the invalid sources for this chunk.
  invalid_sources_by_target = {t: self._sources_for_target(t) for t in invalid_targets}

  # Per-chunk scratch dir; uuid4 keeps concurrent chunks from colliding.
  tmpdir = os.path.join(self.analysis_tmpdir, str(uuid.uuid4()))
  os.mkdir(tmpdir)

  # Figure out the sources and analysis belonging to each partition.
  partitions = []  # Each element is a triple (vts, sources_by_target, analysis).
  for vts in invalidation_check.invalid_vts_partitioned:
    partition_tmpdir = os.path.join(tmpdir, Target.maybe_readable_identify(vts.targets))
    os.mkdir(partition_tmpdir)
    sources = list(itertools.chain.from_iterable(
      [invalid_sources_by_target.get(t, []) for t in vts.targets]))
    # Preserve first-seen order while dropping duplicate source paths.
    de_duped_sources = list(OrderedSet(sources))
    if len(sources) != len(de_duped_sources):
      # groupby requires sorted input; counts is a list of (src, occurrence_count).
      counts = [(src, len(list(srcs))) for src, srcs in itertools.groupby(sorted(sources))]
      self.context.log.warn(
        'De-duped the following sources:\n\t{}'
        .format('\n\t'.join(sorted('{} {}'.format(cnt, src) for src, cnt in counts if cnt > 1))))
    analysis_file = os.path.join(partition_tmpdir, 'analysis')
    partitions.append((vts, de_duped_sources, analysis_file))

  # Split per-partition files out of the global invalid analysis.
  if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file) and partitions:
    with self.context.new_workunit(name='partition-analysis'):
      splits = [(x[1], x[2]) for x in partitions]
      # We have to pass the analysis for any deleted files through zinc, to give it
      # a chance to delete the relevant class files.
      if splits:
        splits[0] = (splits[0][0] + self._deleted_sources, splits[0][1])
      self._analysis_tools.split_to_paths(self._invalid_analysis_file, splits)

  # Now compile partitions one by one.
  for partition_index, partition in enumerate(partitions):
    (vts, sources, analysis_file) = partition
    progress_message = 'partition {} of {}'.format(partition_index + 1, len(partitions))

    # We have to treat the global output dir as an upstream element, so compilers can
    # find valid analysis for previous partitions. We use the global valid analysis
    # for the upstream.
    upstream_analysis = ({self._classes_dir: self._analysis_file}
                         if os.path.exists(self._analysis_file) else {})
    compile_vts(vts,
                sources,
                analysis_file,
                upstream_analysis,
                compile_classpath,
                self._classes_dir,
                None,
                progress_message)

    # No exception was thrown, therefore the compile succeeded and analysis_file is now valid.
    if os.path.exists(analysis_file):  # The compilation created an analysis.
      # Merge the newly-valid analysis with our global valid analysis.
      new_valid_analysis = analysis_file + '.valid.new'
      if self._analysis_parser.is_nonempty_analysis(self._analysis_file):
        with self.context.new_workunit(name='update-upstream-analysis'):
          self._analysis_tools.merge_from_paths([self._analysis_file, analysis_file],
                                                new_valid_analysis)
      else:  # We need to keep analysis_file around. Background tasks may need it.
        shutil.copy(analysis_file, new_valid_analysis)

      # Move the merged valid analysis to its proper location.
      # We do this before checking for missing dependencies, so that we can still
      # enjoy an incremental compile after fixing missing deps.
      self.move(new_valid_analysis, self._analysis_file)

      # Update the products with the latest classes. Must happen before the
      # missing dependencies check.
      register_vts([self.compile_context(t) for t in vts.targets])
      if self._dep_analyzer:
        # Check for missing dependencies.
        actual_deps = self._analysis_parser.parse_deps_from_path(analysis_file,
            lambda: self._compute_classpath_elements_by_class(compile_classpath),
            self._classes_dir)
        with self.context.new_workunit(name='find-missing-dependencies'):
          self._dep_analyzer.check(sources, actual_deps)

      # Kick off the background artifact cache write.
      if update_artifact_cache_vts_work:
        self._write_to_artifact_cache(analysis_file,
                                      vts,
                                      update_artifact_cache_vts_work)

      if self._analysis_parser.is_nonempty_analysis(self._invalid_analysis_file):
        with self.context.new_workunit(name='trim-downstream-analysis'):
          # Trim out the newly-valid sources from our global invalid analysis.
          new_invalid_analysis = analysis_file + '.invalid.new'
          discarded_invalid_analysis = analysis_file + '.invalid.discard'
          self._analysis_tools.split_to_paths(self._invalid_analysis_file,
            [(sources, discarded_invalid_analysis)], new_invalid_analysis)
          self.move(new_invalid_analysis, self._invalid_analysis_file)

      # Record the built target -> sources mapping for future use.
      for target, sources in self._sources_for_targets(vts.targets).items():
        self._record_previous_sources_by_target(target, sources)

      # Now that all the analysis accounting is complete, and we have no missing deps,
      # we can safely mark the targets as valid.
      vts.update()
def _compute_classpath_entries(self, classpath_products, compile_context, extra_compile_time_classpath):
  """Return the classpath entries for compiling `compile_context.target`.

  The classpath is derived from the target's full BFS transitive closure
  (which includes the target itself), restricted to `self._confs`, with any
  extra compile-time entries appended by `ClasspathUtil.compute_classpath`.

  :param classpath_products: The classpath products keyed by target.
  :param compile_context: Compile context whose `target` anchors the closure.
  :param extra_compile_time_classpath: Additional (conf, path) classpath tuples.
  :returns: The computed list of classpath entries for this compile.
  """
  # A classpath specific to this compile and target: walk the whole closure
  # breadth-first and let ClasspathUtil do the conf filtering.
  return ClasspathUtil.compute_classpath(
    compile_context.target.closure(bfs=True),
    classpath_products,
    extra_compile_time_classpath,
    self._confs)