def _post_process(self, target, cp):
  """Must be called on all targets, whether they needed compilation or not."""
  classes_dir, depfile, _ = self._output_paths([target])

  # Update the classpath, for the benefit of tasks downstream from us.
  if os.path.exists(classes_dir):
    for conf in self._confs:
      cp.insert(0, (conf, classes_dir))

  # Make note of the classes generated by this target.
  if os.path.exists(depfile) and self.context.products.isrequired('classes'):
    self.context.log.debug('Reading dependencies from ' + depfile)
    deps = Dependencies(classes_dir)
    deps.load(depfile)

    genmap = self.context.products.get('classes')
    for classes_by_source in deps.findclasses([target]).values():
      for source, classes in classes_by_source.items():
        genmap.add(source, classes_dir, classes)
        genmap.add(target, classes_dir, classes)

    # TODO(John Sirois): Map target.resources in the same way

    # Create and Map scala plugin info files to the owning targets.
    if is_scalac_plugin(target) and target.classname:
      basedir, plugin_info_file = self._zinc_utils.write_plugin_info(self._resources_dir, target)
      genmap.add(target, basedir, [plugin_info_file])
def execute(self, targets):
  java_targets = filter(_is_java, targets)
  if java_targets:
    safe_mkdir(self._classes_dir)
    safe_mkdir(self._depfile_dir)

    egroups = self.context.products.get_data('exclusives_groups')
    group_id = egroups.get_group_key_for_target(java_targets[0])
    for conf in self._confs:
      egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])
      egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])

    with self.invalidated(java_targets, invalidate_dependents=True,
                          partition_size_hint=self._partition_size_hint) as invalidation_check:
      for vt in invalidation_check.invalid_vts_partitioned:
        # Compile, using partitions for efficiency.
        exclusives_classpath = egroups.get_classpath_for_group(group_id)
        self.execute_single_compilation(vt, exclusives_classpath)
        if not self.dry_run:
          vt.update()

      for vt in invalidation_check.all_vts:
        depfile = self.create_depfile_path(vt.targets)
        if not self.dry_run and os.path.exists(depfile):
          # Read in the deps created either just now or by a previous run on these targets.
          deps = Dependencies(self._classes_dir)
          deps.load(depfile)
          self._deps.merge(deps)

    if not self.dry_run:
      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(java_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        for target in java_targets:
          if is_apt(target) and target.processors:
            basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
            processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(processor_info_file, target.processors)
            genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

      # Produce a monolithic apt processor service info file for further compilation rounds
      # and the unit test classpath.
      all_processors = set()
      for target in java_targets:
        if is_apt(target) and target.processors:
          all_processors.update(target.processors)
      processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
      if os.path.exists(processor_info_file):
        with safe_open(processor_info_file, 'r') as f:
          for processor in f:
            all_processors.add(processor.strip())
      self.write_processor_info(processor_info_file, all_processors)
def _write_to_artifact_cache(self, vts, sources_by_target):
  self._ensure_depfile_tmpdir()
  vt_by_target = dict([(vt.target, vt) for vt in vts.versioned_targets])

  # This work can happen in the background, if there's a measurable benefit to that.

  # Split the depfile into per-target files.
  splits = [(sources, JavaCompile.create_depfile_path(self._depfile_tmpdir, [target]))
            for target, sources in sources_by_target.items()]
  deps = Dependencies(self._classes_dir)
  if os.path.exists(self._depfile):
    deps.load(self._depfile)
  deps.split(splits)

  # Gather up the artifacts.
  vts_artifactfiles_pairs = []
  for target, sources in sources_by_target.items():
    artifacts = [JavaCompile.create_depfile_path(self._depfile_tmpdir, [target])]
    for source in sources:
      for cls in deps.classes_by_source.get(source, []):
        artifacts.append(os.path.join(self._classes_dir, cls))
    vt = vt_by_target.get(target)
    if vt is not None:
      vts_artifactfiles_pairs.append((vt, artifacts))

  # Write to the artifact cache.
  self.update_artifact_cache(vts_artifactfiles_pairs)
def execute_single_compilation(self, vt, cp):
  depfile = self.create_depfile_path(vt.targets)

  self.merge_depfile(vt)  # Get what we can from previous builds.

  sources_by_target, fingerprint = self.calculate_sources(vt.targets)
  if sources_by_target:
    sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
    if not sources:
      self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                            % '\n  '.join(str(t) for t in sources_by_target.keys()))
    else:
      classpath = [jar for conf, jar in cp if conf in self._confs]
      result = self.compile(classpath, sources, fingerprint, depfile)
      if result != 0:
        default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
        raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

    self.split_depfile(vt)

    all_artifact_files = [depfile]

    if self._artifact_cache and self.context.options.write_to_artifact_cache:
      deps = Dependencies(self._classes_dir)
      deps.load(depfile)
      vts_artifactfile_pairs = []
      for single_vt in vt.versioned_targets:
        per_target_depfile = self.create_depfile_path([single_vt.target])
        per_target_artifact_files = [per_target_depfile]
        for _, classes_by_source in deps.findclasses([single_vt.target]).items():
          for _, classes in classes_by_source.items():
            classfile_paths = [os.path.join(self._classes_dir, cls) for cls in classes]
            per_target_artifact_files.extend(classfile_paths)
            all_artifact_files.extend(classfile_paths)
        vts_artifactfile_pairs.append((single_vt, per_target_artifact_files))

      vts_artifactfile_pairs.append((vt, all_artifact_files))
      self.update_artifact_cache(vts_artifactfile_pairs)
def post_process(self, versioned_targets):
  depfile = self.create_depfile_path(versioned_targets.targets)
  if not self.dry_run and os.path.exists(depfile):
    # Read in the deps created either just now or by a previous compiler run on these targets.
    deps = Dependencies(self._classes_dir)
    deps.load(depfile)
    self.split_depfile(deps, versioned_targets)
    self._deps.merge(deps)
def execute(self, targets):
  if not self._flatten and len(targets) > 1:
    topologically_sorted_targets = filter(JavaCompile._is_java,
                                          reversed(InternalTarget.sort_targets(targets)))
    for target in topologically_sorted_targets:
      self.execute([target])
    return

  self.context.log.info('Compiling targets %s' % str(targets))

  java_targets = filter(JavaCompile._is_java, targets)
  if java_targets:
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, self._resources_dir))
        cp.insert(0, (conf, self._classes_dir))

      with self.changed(java_targets, invalidate_dependants=True) as changed:
        sources_by_target, processors, fingerprint = self.calculate_sources(changed)
        if sources_by_target:
          sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
          if not sources:
            self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                                  % '\n  '.join(str(t) for t in sources_by_target.keys()))
          else:
            classpath = [jar for conf, jar in cp if conf in self._confs]
            result = self.compile(classpath, sources, fingerprint)
            if result != 0:
              default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
              raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

          if processors:
            # Produce a monolithic apt processor service info file for further compilation rounds
            # and the unit test classpath.
            processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
            if os.path.exists(processor_info_file):
              with safe_open(processor_info_file, 'r') as f:
                for processor in f:
                  processors.add(processor.strip())
            self.write_processor_info(processor_info_file, processors)

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        dependencies = Dependencies(self._classes_dir, self._dependencies_file)
        for target, classes_by_source in dependencies.findclasses(targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        for target in targets:
          if is_apt(target) and target.processors:
            basedir = os.path.join(self._resources_dir, target.id)
            processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(processor_info_file, target.processors)
            genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def split_artifact(self, deps, versioned_target_set):
  if len(versioned_target_set.targets) <= 1:
    return

  buildroot = get_buildroot()
  classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
  src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)

  # List of triples of (list of sources, destination output dir, destination analysis cache).
  # For dependency analysis, we need to record the cache files that we create in the split.
  analysis_splits = []

  for target in versioned_target_set.targets:
    classes_by_source = classes_by_source_by_target.get(target, {})
    dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
    safe_rmtree(dst_output_dir)
    safe_mkdir(dst_output_dir)

    sources = []
    dst_deps = Dependencies(dst_output_dir)
    for source, classes in classes_by_source.items():
      src = os.path.join(target.target_base, source)
      dst_deps.add(src, classes)
      source_abspath = os.path.join(buildroot, target.target_base, source)
      sources.append(source_abspath)
      for cls in classes:
        # Copy the class file.
        dst = os.path.join(dst_output_dir, cls)
        safe_mkdir(os.path.dirname(dst))
        os.link(os.path.join(src_output_dir, cls), dst)
    dst_deps.save(dst_depfile)
    analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
    self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))

  # Use zinc to split the analysis files.
  if os.path.exists(src_analysis_cache):
    analysis_args = []
    analysis_args.extend(self._zinc_jar_args)
    analysis_args.extend([
      '-log-level', self.context.options.log_level or 'info',
      '-analysis',
      '-mirror-analysis',
    ])
    split_args = analysis_args + [
      '-cache', src_analysis_cache,
      '-split', ','.join(['{%s}:%s' % (':'.join(x[0]), x[2]) for x in analysis_splits]),
    ]
    if self.runjava(self._main, classpath=self._zinc_classpath, args=split_args,
                    jvmargs=self._jvm_args):
      raise TaskError('zinc failed to split analysis files %s from %s'
                      % (':'.join([x[2] for x in analysis_splits]), src_analysis_cache))

    # Now rebase the newly created analysis files.
    for split in analysis_splits:
      dst_analysis_cache = split[2]
      if os.path.exists(dst_analysis_cache):
        rebase_args = analysis_args + [
          '-cache', dst_analysis_cache,
          '-rebase', '%s:%s' % (src_output_dir, split[1]),
        ]
        if self.runjava(self._main, classpath=self._zinc_classpath, args=rebase_args,
                        jvmargs=self._jvm_args):
          raise TaskError('In split_artifact: zinc failed to rebase analysis file %s'
                          % dst_analysis_cache)
def _split_artifact(self, deps, versioned_target_set):
  """Splits an artifact representing several targets into target-by-target artifacts.

  Creates an output classes dir, a depfile and an analysis file for each target.
  Note that it's not OK to create incomplete artifacts here: this is run *after* a zinc
  invocation, and the expectation is that the result is complete.

  NOTE: This method is reentrant.
  """
  if len(versioned_target_set.targets) <= 1:
    return

  classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
  src_classes_dir, _, src_analysis_file = self._output_paths(versioned_target_set.targets)

  # Specifies that the list of sources defines a split to the classes dir and analysis file.
  SplitInfo = namedtuple('SplitInfo', ['sources', 'dst_classes_dir', 'dst_analysis_file'])

  analysis_splits = []  # List of SplitInfos.
  portable_analysis_splits = []  # The same, for the portable version of the analysis cache.

  # Prepare the split arguments.
  for target in versioned_target_set.targets:
    classes_by_source = classes_by_source_by_target.get(target, {})
    dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths([target])
    safe_rmtree(dst_classes_dir)
    safe_mkdir(dst_classes_dir)

    sources = []
    dst_deps = Dependencies(dst_classes_dir)
    for source, classes in classes_by_source.items():
      src = os.path.join(target.target_base, source)
      dst_deps.add(src, classes)
      sources.append(os.path.join(target.target_base, source))
      for cls in classes:
        # Copy the class file.
        dst = os.path.join(dst_classes_dir, cls)
        safe_mkdir(os.path.dirname(dst))
        os.link(os.path.join(src_classes_dir, cls), dst)
    dst_deps.save(dst_depfile)
    analysis_splits.append(SplitInfo(sources, dst_classes_dir, dst_analysis_file))
    portable_analysis_splits.append(SplitInfo(sources, dst_classes_dir, _portable(dst_analysis_file)))

  def do_split(src_analysis_file, splits):
    if os.path.exists(src_analysis_file):
      if self._zinc_utils.run_zinc_split(src_analysis_file,
                                         [(x.sources, x.dst_analysis_file) for x in splits]):
        raise TaskError('zinc failed to split analysis files %s from %s'
                        % (':'.join([x.dst_analysis_file for x in splits]), src_analysis_file))
      for split in splits:
        if os.path.exists(split.dst_analysis_file):
          if self._zinc_utils.run_zinc_rebase(split.dst_analysis_file,
                                              [(src_classes_dir, split.dst_classes_dir)]):
            raise TaskError('In split_artifact: zinc failed to rebase analysis file %s'
                            % split.dst_analysis_file)

  # Now rebase the newly created analysis file(s) to reflect the split classes dirs.
  do_split(src_analysis_file, analysis_splits)
  do_split(_portable(src_analysis_file), portable_analysis_splits)
def _compute_classes_by_source(self, depfile=None):
  """Compute src->classes."""
  if depfile is None:
    depfile = self._depfile
  if not os.path.exists(depfile):
    return {}
  deps = Dependencies(self._classes_dir)
  deps.load(depfile)
  return deps.classes_by_source
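# The snippets above and below exercise only a small surface of the Dependencies class:
# add/load/save/merge/split/findclasses plus the classes_by_source mapping. The following
# stand-in captures that contract so these methods can be read in isolation. It is a sketch
# only: the real pants implementation and its on-disk depfile format are not shown in this
# file, and the JSON encoding used here is an assumption.
import json
from collections import defaultdict


class DependenciesSketch(object):
  """Hypothetical src -> [classfiles] mapping, mirroring the calls made in these snippets."""

  def __init__(self, outputdir):
    self.outputdir = outputdir  # Base dir the class file paths are relative to.
    self.classes_by_source = defaultdict(list)

  def add(self, source, classes):
    self.classes_by_source[source].extend(classes)

  def merge(self, other):
    # Fold another mapping into this one, as dst_deps.merge(src_deps) does above.
    for source, classes in other.classes_by_source.items():
      self.add(source, classes)

  def load(self, depfile):
    with open(depfile) as f:
      for source, classes in json.load(f).items():
        self.add(source, classes)

  def save(self, depfile):
    with open(depfile, 'w') as f:
      json.dump(dict(self.classes_by_source), f)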
def _compile(self, versioned_target_set, classpath, upstream_analysis_files):
  """Actually compile some targets.

  May be invoked concurrently on independent target sets.

  Postcondition: The individual targets in versioned_target_set are up-to-date, as if each
  were compiled individually.
  """
  # Note: We actually compile all the targets in the set in a single zinc call, because
  # compiler invocation overhead is high, but this fact is not exposed outside this method.
  classes_dir, depfile, analysis_file = self._output_paths(versioned_target_set.targets)
  safe_mkdir(classes_dir)

  # Get anything we have from previous builds.
  self._merge_artifact(versioned_target_set)

  # Compute the sources we need to compile.
  sources_by_target = ScalaCompile._calculate_sources(versioned_target_set.targets)

  if sources_by_target:
    sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
    if not sources:
      self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s'
                            % '\n  '.join(str(t) for t in sources_by_target.keys()))
    else:
      # Invoke the compiler.
      self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
      if self._zinc_utils.compile(classpath, sources, classes_dir, analysis_file,
                                  upstream_analysis_files, depfile):
        raise TaskError('Compile failed.')

      # Read in the deps we just created.
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(classes_dir)
      deps.load(depfile)

      # Split the artifact into per-target artifacts.
      self._split_artifact(deps, versioned_target_set)

      # Write to artifact cache, if needed.
      for vt in versioned_target_set.versioned_targets:
        vt_classes_dir, vt_depfile, vt_analysis_file = self._output_paths(vt.targets)
        vt_portable_analysis_file = _portable(vt_analysis_file)
        if self._artifact_cache and self.context.options.write_to_artifact_cache:
          # Relativize the analysis.
          # TODO: Relativize before splitting? This will require changes to Zinc, which currently
          # eliminates paths it doesn't recognize (including our placeholders) when splitting.
          if os.path.exists(vt_analysis_file) and \
             self._zinc_utils.relativize_analysis_file(vt_analysis_file, vt_portable_analysis_file):
            raise TaskError('Zinc failed to relativize analysis file: %s' % vt_analysis_file)
          # Write the per-target artifacts to the cache.
          artifacts = [vt_classes_dir, vt_depfile, vt_portable_analysis_file]
          self.update_artifact_cache(vt, artifacts)
        else:
          safe_rmtree(vt_portable_analysis_file)  # Don't leave cruft lying around.
def execute(self, targets):
  java_targets = [t for t in targets if t.has_sources(".java")]
  if not java_targets:
    return

  # Get the exclusives group for the targets to compile.
  # Group guarantees that there'll be a single exclusives key for them.
  egroups = self.context.products.get_data("exclusives_groups")
  group_id = egroups.get_group_key_for_target(java_targets[0])

  # Add classes and resource dirs to the classpath for us and for downstream tasks.
  for conf in self._confs:
    egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])
    egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])

  # Get the classpath generated by upstream JVM tasks (including previous calls to execute()).
  cp = egroups.get_classpath_for_group(group_id)

  with self.invalidated(java_targets, invalidate_dependents=True,
                        partition_size_hint=self._partition_size_hint) as invalidation_check:
    if not self.dry_run:
      for vts in invalidation_check.invalid_vts_partitioned:
        # Compile, using partitions for efficiency.
        sources_by_target = self._process_target_partition(vts, cp)

        # TODO: Check for missing dependencies. See ScalaCompile for an example.
        # Will require figuring out what the actual deps of a class file are.

        vts.update()
        if self.artifact_cache_writes_enabled():
          self._write_to_artifact_cache(vts, sources_by_target)

      # Provide the target->class and source->class mappings to downstream tasks if needed.
      if self.context.products.isrequired("classes"):
        if os.path.exists(self._depfile):
          sources_by_target = self._compute_sources_by_target(java_targets)
          deps = Dependencies(self._classes_dir)
          deps.load(self._depfile)
          self._add_all_products_to_genmap(sources_by_target, deps.classes_by_source)

      # Produce a monolithic apt processor service info file for further compilation rounds
      # and the unit test classpath.
      all_processors = set()
      for target in java_targets:
        if target.is_apt and target.processors:
          all_processors.update(target.processors)
      processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
      if os.path.exists(processor_info_file):
        with safe_open(processor_info_file, "r") as f:
          for processor in f:
            all_processors.add(processor.strip())
      self.write_processor_info(processor_info_file, all_processors)
def _merge_artifact(self, versioned_target_set):
  """Merges artifacts representing the individual targets in a VersionedTargetSet into one
  artifact for that set.

  Creates an output classes dir, depfile and analysis file for the VersionedTargetSet.
  Note that the merged artifact may be incomplete (e.g., if we have no previous artifacts
  for some of the individual targets). That's OK: We run this right before we invoke zinc,
  which will fill in what's missing. This method is not required for correctness, only for
  efficiency: it can prevent zinc from doing superfluous work.

  NOTE: This method is reentrant.
  """
  if len(versioned_target_set.targets) <= 1:
    return  # Nothing to do.

  with temporary_dir() as tmpdir:
    dst_classes_dir, dst_depfile, dst_analysis_file = self._output_paths(versioned_target_set.targets)
    safe_rmtree(dst_classes_dir)
    safe_mkdir(dst_classes_dir)

    src_analysis_files = []

    # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
    dst_deps = Dependencies(dst_classes_dir)

    for target in versioned_target_set.targets:
      src_classes_dir, src_depfile, src_analysis_file = self._output_paths([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(src_classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)

        classes_by_source = src_deps.findclasses([target]).get(target, {})
        for source, classes in classes_by_source.items():
          for cls in classes:
            src = os.path.join(src_classes_dir, cls)
            dst = os.path.join(dst_classes_dir, cls)
            # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
            # it's missing and rebuild it.
            # dst may already exist if we have overlapping targets. It's not a good idea
            # to have those, but until we enforce it, we must allow it here.
            if os.path.exists(src) and not os.path.exists(dst):
              # Copy the class file.
              safe_mkdir(os.path.dirname(dst))
              os.link(src, dst)

        # Rebase a copy of the per-target analysis files to reflect the merged classes dir.
        if os.path.exists(src_analysis_file):
          src_analysis_file_tmp = \
            os.path.join(tmpdir, os.path.relpath(src_analysis_file, self._analysis_files_base))
          shutil.copyfile(src_analysis_file, src_analysis_file_tmp)
          src_analysis_files.append(src_analysis_file_tmp)
          if self._zinc_utils.run_zinc_rebase(src_analysis_file_tmp,
                                              [(src_classes_dir, dst_classes_dir)]):
            self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. '
                                  'Target may require a full rebuild.' % src_analysis_file_tmp)

    dst_deps.save(dst_depfile)

    if self._zinc_utils.run_zinc_merge(src_analysis_files, dst_analysis_file):
      self.context.log.warn('zinc failed to merge analysis files %s to %s. '
                            'Target may require a full rebuild.'
                            % (':'.join(src_analysis_files), dst_analysis_file))
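# The "copy" above is actually a hard link (os.link), so merging many targets' class files
# into one partition dir costs no extra disk space or I/O beyond directory entries; it does
# require that source and destination live on the same filesystem. A minimal, self-contained
# illustration of the linking pattern used above (link_classfile is a hypothetical helper,
# and os.makedirs stands in for pants' safe_mkdir):
import os


def link_classfile(src, dst):
  """Hard-link src to dst, tolerating a missing src and a pre-existing dst, as above."""
  if os.path.exists(src) and not os.path.exists(dst):
    dstdir = os.path.dirname(dst)
    if not os.path.isdir(dstdir):
      os.makedirs(dstdir)
    os.link(src, dst)  # No data copy; both names refer to the same inode.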
def split_depfile(self, deps, versioned_target_set):
  if len(versioned_target_set.targets) <= 1:
    return

  classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
  for target in versioned_target_set.targets:
    classes_by_source = classes_by_source_by_target.get(target, {})
    dst_depfile = self.create_depfile_path([target])
    dst_deps = Dependencies(self._classes_dir)
    for source, classes in classes_by_source.items():
      src = os.path.join(target.target_base, source)
      dst_deps.add(src, classes)
    dst_deps.save(dst_depfile)
def post_process_cached_vts(cached_vts):
  # Note: this is a nested helper, so 'self' is captured from the enclosing method's scope.
  # Merge the cached analyses into the existing global one.
  if cached_vts:
    with self.context.new_workunit(name='merge-dependencies'):
      global_deps = Dependencies(self._classes_dir)
      if os.path.exists(self._depfile):
        global_deps.load(self._depfile)
      for vt in cached_vts:
        for target in vt.targets:
          depfile = JavaCompile.create_depfile_path(self._depfile_tmpdir, [target])
          if os.path.exists(depfile):
            deps = Dependencies(self._classes_dir)
            deps.load(depfile)
            global_deps.merge(deps)
      global_deps.save(self._depfile)
def __init__(self, context): NailgunTask.__init__(self, context, workdir=context.config.get("java-compile", "nailgun_dir")) self._partition_size_hint = ( context.options.java_compile_partition_size_hint if context.options.java_compile_partition_size_hint != -1 else context.config.getint("java-compile", "partition_size_hint") ) workdir = context.config.get("java-compile", "workdir") self._classes_dir = os.path.join(workdir, "classes") self._resources_dir = os.path.join(workdir, "resources") self._depfile_dir = os.path.join(workdir, "depfiles") self._deps = Dependencies(self._classes_dir) self._jmake_profile = context.config.get("java-compile", "jmake-profile") self._compiler_profile = context.config.get("java-compile", "compiler-profile") self._args = context.config.getlist("java-compile", "args") self._jvm_args = context.config.getlist("java-compile", "jvm_args") if context.options.java_compile_warnings: self._args.extend(context.config.getlist("java-compile", "warning_args")) else: self._args.extend(context.config.getlist("java-compile", "no_warning_args")) self._confs = context.config.getlist("java-compile", "confs") # The artifact cache to read from/write to. artifact_cache_spec = context.config.getlist("java-compile", "artifact_caches") self.setup_artifact_cache(artifact_cache_spec)
def __init__(self, context):
  NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

  self._flatten = \
    context.options.java_compile_flatten if context.options.java_compile_flatten is not None else \
    context.config.getbool('java-compile', 'default_to_flatten')

  workdir = context.config.get('java-compile', 'workdir')
  self._classes_dir = os.path.join(workdir, 'classes')
  self._resources_dir = os.path.join(workdir, 'resources')
  self._depfile_dir = os.path.join(workdir, 'depfiles')
  self._deps = Dependencies(self._classes_dir)

  self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
  self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

  self._args = context.config.getlist('java-compile', 'args')
  self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

  if context.options.java_compile_warnings:
    self._args.extend(context.config.getlist('java-compile', 'warning_args'))
  else:
    self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

  self._confs = context.config.getlist('java-compile', 'confs')
def __init__(self, context):
  NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

  self._partition_size_hint = \
    context.options.java_compile_partition_size_hint \
    if context.options.java_compile_partition_size_hint != -1 \
    else context.config.getint('java-compile', 'partition_size_hint')

  workdir = context.config.get('java-compile', 'workdir')
  self._classes_dir = os.path.join(workdir, 'classes')
  self._resources_dir = os.path.join(workdir, 'resources')
  self._depfile_dir = os.path.join(workdir, 'depfiles')
  self._deps = Dependencies(self._classes_dir)

  self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
  self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

  self._args = context.config.getlist('java-compile', 'args')
  self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

  if context.options.java_compile_warnings:
    self._args.extend(context.config.getlist('java-compile', 'warning_args'))
  else:
    self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

  self._confs = context.config.getlist('java-compile', 'confs')

  # The artifact cache to read from/write to.
  artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches')
  self.setup_artifact_cache(artifact_cache_spec)
def execute_single_compilation(self, java_targets, cp):
  self.context.log.info('Compiling targets %s' % str(java_targets))

  # Compute the id of this compilation. We try to make it human-readable.
  if len(java_targets) == 1:
    compilation_id = java_targets[0].id
  else:
    compilation_id = self.context.identify(java_targets)

  if self._flatten:
    # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
    # allows us to build different targets in different invocations without losing dependency information
    # from any of them.
    depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
  else:
    # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
    # compilation will read in the entire depfile, add its stuff to it and write it out again).
    depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  with self.changed(java_targets, invalidate_dependants=True) as changed:
    sources_by_target, processors, fingerprint = self.calculate_sources(changed)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                              % '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

      if processors:
        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              processors.add(processor.strip())
        self.write_processor_info(processor_info_file, processors)

  # Read in the deps created either just now or by a previous compiler run on these targets.
  deps = Dependencies(self._classes_dir)
  deps.load(depfile)
  self._deps.merge(deps)
def execute_single_compilation(self, versioned_targets, cp):
  compilation_id = Target.maybe_readable_identify(versioned_targets.targets)

  # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
  # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

  if self._flatten:
    # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
    # allows us to build different targets in different invocations without losing dependency information
    # from any of them.
    depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
  else:
    # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
    # compilation will read in the entire depfile, add its stuff to it and write it out again).
    depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  if not versioned_targets.valid:
    self.context.log.info('Compiling targets %s' % str(versioned_targets.targets))
    sources_by_target, processors, fingerprint = self.calculate_sources(versioned_targets.targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        touch(depfile)  # Create an empty depfile, since downstream code may assume that one exists.
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                              % '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

      if processors:
        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              processors.add(processor.strip())
        self.write_processor_info(processor_info_file, processors)

  # Read in the deps created either just now or by a previous compiler run on these targets.
  deps = Dependencies(self._classes_dir)
  deps.load(depfile)
  self._deps.merge(deps)
def merge_artifact(self, versioned_target_set):
  if len(versioned_target_set.targets) <= 1:
    return

  with temporary_dir() as tmpdir:
    dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths(versioned_target_set.targets)
    safe_rmtree(dst_output_dir)
    safe_mkdir(dst_output_dir)

    src_analysis_caches = []

    # TODO: Do we actually need to merge deps? Zinc will stomp them anyway on success.
    dst_deps = Dependencies(dst_output_dir)

    for target in versioned_target_set.targets:
      src_output_dir, src_depfile, src_analysis_cache = self.create_output_paths([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(src_output_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)

        classes_by_source = src_deps.findclasses([target]).get(target, {})
        for source, classes in classes_by_source.items():
          for cls in classes:
            src = os.path.join(src_output_dir, cls)
            dst = os.path.join(dst_output_dir, cls)
            # src may not exist if we aborted a build in the middle. That's OK: zinc will notice that
            # it's missing and rebuild it.
            # dst may already exist if we have overlapping targets. It's not a good idea
            # to have those, but until we enforce it, we must allow it here.
            if os.path.exists(src) and not os.path.exists(dst):
              # Copy the class file.
              safe_mkdir(os.path.dirname(dst))
              os.link(src, dst)

        # Rebase a copy of the per-target analysis files prior to merging.
        if os.path.exists(src_analysis_cache):
          src_analysis_cache_tmp = \
            os.path.join(tmpdir, os.path.relpath(src_analysis_cache, self._analysis_cache_dir))
          shutil.copyfile(src_analysis_cache, src_analysis_cache_tmp)
          src_analysis_caches.append(src_analysis_cache_tmp)
          if self._zinc_utils.run_zinc_rebase(cache=src_analysis_cache_tmp,
                                              rebasings=[(src_output_dir, dst_output_dir)]):
            self.context.log.warn('In merge_artifact: zinc failed to rebase analysis file %s. '
                                  'Target may require a full rebuild.' % src_analysis_cache_tmp)

    dst_deps.save(dst_depfile)

    if self._zinc_utils.run_zinc_merge(src_caches=src_analysis_caches, dst_cache=dst_analysis_cache):
      self.context.log.warn('zinc failed to merge analysis files %s to %s. '
                            'Target may require a full rebuild.'
                            % (':'.join(src_analysis_caches), dst_analysis_cache))
def execute(self, targets):
  if not self._flatten and len(targets) > 1:
    topologically_sorted_targets = filter(is_scala, reversed(InternalTarget.sort_targets(targets)))
    for target in topologically_sorted_targets:
      self.execute([target])
    return

  self.context.log.info('Compiling targets %s' % str(targets))

  scala_targets = filter(is_scala, targets)
  if scala_targets:
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, self._resources_dir))
        cp.insert(0, (conf, self._classes_dir))

      with self.changed(scala_targets, invalidate_dependants=True) as changed_targets:
        sources_by_target = self.calculate_sources(changed_targets)
        if sources_by_target:
          sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
          if not sources:
            self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s'
                                  % '\n  '.join(str(t) for t in sources_by_target.keys()))
          else:
            classpath = [jar for conf, jar in cp if conf in self._confs]
            result = self.compile(classpath, sources)
            if result != 0:
              raise TaskError('%s returned %d' % (self._main, result))

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        dependencies = Dependencies(self._classes_dir, self._depfile)
        for target, classes_by_source in dependencies.findclasses(targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # Create and Map scala plugin info files to the owning targets.
        for target in targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])
def execute(self, targets):
  java_targets = filter(JavaCompile._has_java_sources, targets)
  if java_targets:
    safe_mkdir(self._classes_dir)
    safe_mkdir(self._depfile_dir)

    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, self._resources_dir))
        cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependents=True,
                            partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          self.execute_single_compilation(vt, cp)
          if not self.dry_run:
            vt.update()

        for vt in invalidation_check.all_vts:
          depfile = self.create_depfile_path(vt.targets)
          if not self.dry_run and os.path.exists(depfile):
            # Read in the deps created either just now or by a previous run on these targets.
            deps = Dependencies(self._classes_dir)
            deps.load(depfile)
            self._deps.merge(deps)

    if not self.dry_run:
      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(java_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        for target in java_targets:
          if is_apt(target) and target.processors:
            basedir = os.path.join(self._resources_dir, target.id)
            processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(processor_info_file, target.processors)
            genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def execute_single_compilation(self, scala_targets, cp, upstream_analysis_caches):
  """Execute a single compilation, updating upstream_analysis_caches if needed."""
  self.context.log.info('Compiling targets %s' % str(scala_targets))

  compilation_id = self.context.maybe_readable_identify(scala_targets)

  # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
  # analysis caches of previous compilations. We then copy the results out to the real output dir.
  output_dir = os.path.join(self._incremental_classes_dir, compilation_id)

  depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'
  analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id) + '.analysis_cache'

  # We must defer dependency analysis to zinc. If we exclude files from a repeat build, zinc will assume
  # the files were deleted and will nuke the corresponding class files.
  invalidate_globally = self._flatten

  with self.changed(scala_targets, invalidate_dependants=True,
                    invalidate_globally=invalidate_globally) as changed_targets:
    sources_by_target = self.calculate_sources(changed_targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s'
                              % '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, output_dir, analysis_cache,
                              upstream_analysis_caches, depfile)
        if result != 0:
          raise TaskError('%s returned %d' % (self._main, result))

  # Link class files emitted in this compilation into the central classes dir.
  self.link_all(output_dir, self._classes_dir)

  # Read in the deps created either just now or by a previous compiler run on these targets.
  self.context.log.debug('Reading dependencies from ' + depfile)
  deps = Dependencies(output_dir)
  deps.load(depfile)
  self._deps.merge(deps)

  analysis_cache_parts = os.path.split(analysis_cache)
  if not upstream_analysis_caches.has(output_dir):
    # A previous chunk might have already updated this. It is certainly possible for a later chunk to
    # independently depend on some target that a previous chunk already built.
    upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [analysis_cache_parts[1]])

  return compilation_id
def split_artifact(self, deps, versioned_target_set):
  if len(versioned_target_set.targets) <= 1:
    return

  classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
  src_output_dir, _, src_analysis_cache = self.create_output_paths(versioned_target_set.targets)

  # List of triples of (list of sources, destination output dir, destination analysis cache).
  # For dependency analysis, we need to record the cache files that we create in the split.
  analysis_splits = []

  for target in versioned_target_set.targets:
    classes_by_source = classes_by_source_by_target.get(target, {})
    dst_output_dir, dst_depfile, dst_analysis_cache = self.create_output_paths([target])
    safe_rmtree(dst_output_dir)
    safe_mkdir(dst_output_dir)

    sources = []
    dst_deps = Dependencies(dst_output_dir)
    for source, classes in classes_by_source.items():
      src = os.path.join(target.target_base, source)
      dst_deps.add(src, classes)
      sources.append(os.path.join(target.target_base, source))
      for cls in classes:
        # Copy the class file.
        dst = os.path.join(dst_output_dir, cls)
        safe_mkdir(os.path.dirname(dst))
        os.link(os.path.join(src_output_dir, cls), dst)
    dst_deps.save(dst_depfile)
    analysis_splits.append((sources, dst_output_dir, dst_analysis_cache))
    self.generated_caches.add(os.path.join(dst_output_dir, dst_analysis_cache))

  # Split the analysis files.
  if os.path.exists(src_analysis_cache):
    if self._zinc_utils.run_zinc_split(src_cache=src_analysis_cache,
                                       splits=[(x[0], x[2]) for x in analysis_splits]):
      raise TaskError('zinc failed to split analysis files %s from %s'
                      % (':'.join([x[2] for x in analysis_splits]), src_analysis_cache))

  # Now rebase the newly created analysis files.
  for split in analysis_splits:
    dst_analysis_cache = split[2]
    dst_output_dir = split[1]
    if os.path.exists(dst_analysis_cache):
      rebasings = [(src_output_dir, dst_output_dir)]
      if self._zinc_utils.run_zinc_rebase(cache=dst_analysis_cache, rebasings=rebasings):
        raise TaskError('In split_artifact: zinc failed to rebase analysis file %s'
                        % dst_analysis_cache)
def split_depfile(self, vt):
  depfile = self.create_depfile_path(vt.targets)
  if len(vt.targets) <= 1 or not os.path.exists(depfile) or self.dry_run:
    return

  deps = Dependencies(self._classes_dir)
  deps.load(depfile)

  classes_by_source_by_target = deps.findclasses(vt.targets)
  for target in vt.targets:
    classes_by_source = classes_by_source_by_target.get(target, {})
    dst_depfile = self.create_depfile_path([target])
    dst_deps = Dependencies(self._classes_dir)
    for source, classes in classes_by_source.items():
      src = os.path.join(target.target_base, source)
      dst_deps.add(src, classes)
    dst_deps.save(dst_depfile)
def merge_depfile(self, versioned_target_set):
  if len(versioned_target_set.targets) <= 1:
    return

  dst_depfile = self.create_depfile_path(versioned_target_set.targets)
  dst_deps = Dependencies(self._classes_dir)
  for target in versioned_target_set.targets:
    src_depfile = self.create_depfile_path([target])
    if os.path.exists(src_depfile):
      src_deps = Dependencies(self._classes_dir)
      src_deps.load(src_depfile)
      dst_deps.merge(src_deps)
  dst_deps.save(dst_depfile)
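# Taken together, merge_depfile and split_depfile form a round trip: per-target depfiles are
# folded into one depfile for a multi-target partition before compiling, and the rewritten
# partition depfile is projected back onto individual targets afterwards. A hedged usage
# sketch; 'task' and 'vt' stand in for the real task instance and VersionedTargetSet, which
# are not constructed here:
#
#   task.merge_depfile(vt)   # reuse per-target deps from previous builds
#   result = task.compile(classpath, sources, fingerprint, task.create_depfile_path(vt.targets))
#   task.split_depfile(vt)   # write fresh per-target depfiles for caching and later merges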
def post_process(self, vt, upstream_analysis_caches, split_artifact):
  output_dir, depfile, analysis_cache = self.create_output_paths(vt.targets)
  if not self.dry_run:
    # Read in the deps created either just now or by a previous compiler run on these targets.
    if os.path.exists(depfile):
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(output_dir)
      deps.load(depfile)

      if split_artifact:
        self.split_artifact(deps, vt)

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')
        for target, classes_by_source in deps.findclasses(vt.targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, output_dir, classes)
            genmap.add(target, output_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # Create and Map scala plugin info files to the owning targets.
        for target in vt.targets:
          if is_scalac_plugin(target) and target.classname:
            basedir = self.write_plugin_info(target)
            genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

    # Update the upstream analysis map.
    if os.path.exists(analysis_cache):
      analysis_cache_parts = os.path.split(analysis_cache)
      if not upstream_analysis_caches.has(output_dir):
        # A previous chunk might have already updated this. It is certainly possible for a later chunk to
        # independently depend on some target that a previous chunk already built.
        upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [analysis_cache_parts[1]])

    # Update the classpath.
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, output_dir))
def __init__(self, context):
  NailgunTask.__init__(self, context, workdir=context.config.get('scala-compile', 'nailgun_dir'))

  self._incremental = \
    context.options.scala_compile_incremental if context.options.scala_compile_incremental is not None else \
    context.config.getbool('scala-compile', 'default_to_incremental')

  self._flatten = \
    context.options.scala_compile_flatten if context.options.scala_compile_flatten is not None else \
    context.config.getbool('scala-compile', 'default_to_flatten')

  self._compile_profile = context.config.get('scala-compile', 'compile-profile')  # The target scala version.
  self._zinc_profile = context.config.get('scala-compile', 'zinc-profile')
  self._depemitter_profile = context.config.get('scala-compile', 'dependencies-plugin-profile')

  # All scala targets implicitly depend on the selected scala runtime.
  scaladeps = []
  for spec in context.config.getlist('scala-compile', 'scaladeps'):
    scaladeps.extend(context.resolve(spec))
  for target in context.targets(is_scala):
    target.update_dependencies(scaladeps)

  workdir = context.config.get('scala-compile', 'workdir')
  self._incremental_classes_dir = os.path.join(workdir, 'incremental.classes')
  self._classes_dir = os.path.join(workdir, 'classes')
  self._analysis_cache_dir = os.path.join(workdir, 'analysis_cache')
  self._resources_dir = os.path.join(workdir, 'resources')

  if self._incremental:
    self._main = context.config.get('scala-compile', 'zinc-main')
  else:
    self._main = context.config.get('scala-compile', 'main')

  self._args = context.config.getlist('scala-compile', 'args')
  self._jvm_args = context.config.getlist('scala-compile', 'jvm_args')

  if context.options.scala_compile_warnings:
    self._args.extend(context.config.getlist('scala-compile', 'warning_args'))
  else:
    self._args.extend(context.config.getlist('scala-compile', 'no_warning_args'))

  self._confs = context.config.getlist('scala-compile', 'confs')
  self._depfile_dir = os.path.join(workdir, 'depfiles')
  self._deps = Dependencies(self._classes_dir)
def __init__(self, context):
  NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

  if context.options.java_compile_partition_size_hint != -1:
    self._partition_size_hint = context.options.java_compile_partition_size_hint
  else:
    self._partition_size_hint = context.config.getint('java-compile', 'partition_size_hint',
                                                      default=1000)

  workdir = context.config.get('java-compile', 'workdir')
  self._classes_dir = os.path.join(workdir, 'classes')
  self._resources_dir = os.path.join(workdir, 'resources')
  self._depfile_dir = os.path.join(workdir, 'depfiles')
  self._deps = Dependencies(self._classes_dir)

  self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
  self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

  self._opts = context.config.getlist('java-compile', 'args')
  self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

  self._javac_opts = []
  if context.options.java_compile_args:
    for arg in context.options.java_compile_args:
      self._javac_opts.extend(shlex.split(arg))
  else:
    self._javac_opts.extend(context.config.getlist('java-compile', 'javac_args', default=[]))

  if context.options.java_compile_warnings:
    self._opts.extend(context.config.getlist('java-compile', 'warning_args'))
  else:
    self._opts.extend(context.config.getlist('java-compile', 'no_warning_args'))

  self._confs = context.config.getlist('java-compile', 'confs')

  self.context.products.require_data('exclusives_groups')

  # The artifact cache to read from/write to.
  artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches', default=[])
  self.setup_artifact_cache(artifact_cache_spec)
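# Each of these constructors reads the same '[java-compile]' section of pants.ini. A sketch
# of the corresponding config entries follows; the keys are exactly the ones fetched above,
# but every value shown is illustrative only, not taken from a real pants.ini:
#
#   [java-compile]
#   nailgun_dir: %(pants_workdir)s/ng/java-compile
#   workdir: %(pants_workdir)s/javac
#   partition_size_hint: 1000
#   jmake-profile: jmake
#   compiler-profile: java-compiler
#   args: []
#   jvm_args: ['-Xmx1g']
#   javac_args: []
#   warning_args: ['-C-Xlint:all']
#   no_warning_args: ['-C-nowarn']
#   confs: ['default']
#   artifact_caches: []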
class JavaCompile(NailgunTask):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("args"), dest="java_compile_args", action="append",
                            help="Pass these extra args to javac.")

    option_group.add_option(mkflag("partition-size-hint"), dest="java_compile_partition_size_hint",
                            action="store", type="int", default=-1,
                            help="Roughly how many source files to attempt to compile together. Set"
                                 " to a large number to compile all sources together. Set this to 0"
                                 " to compile target-by-target. Default is set in pants.ini.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    if context.options.java_compile_partition_size_hint != -1:
      self._partition_size_hint = context.options.java_compile_partition_size_hint
    else:
      self._partition_size_hint = context.config.getint('java-compile', 'partition_size_hint',
                                                        default=1000)

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._opts = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    self._javac_opts = []
    if context.options.java_compile_args:
      for arg in context.options.java_compile_args:
        self._javac_opts.extend(shlex.split(arg))
    else:
      self._javac_opts.extend(context.config.getlist('java-compile', 'javac_args', default=[]))

    if context.options.java_compile_warnings:
      self._opts.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._opts.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

    # The artifact cache to read from/write to.
    artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches')
    self.setup_artifact_cache(artifact_cache_spec)

  def product_type(self):
    return 'classes'

  def can_dry_run(self):
    return True

  def execute(self, targets):
    java_targets = filter(_is_java, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

        with self.invalidated(java_targets, invalidate_dependents=True,
                              partition_size_hint=self._partition_size_hint) as invalidation_check:
          for vt in invalidation_check.invalid_vts_partitioned:
            # Compile, using partitions for efficiency.
            self.execute_single_compilation(vt, cp)
            if not self.dry_run:
              vt.update()

          for vt in invalidation_check.all_vts:
            depfile = self.create_depfile_path(vt.targets)
            if not self.dry_run and os.path.exists(depfile):
              # Read in the deps created either just now or by a previous run on these targets.
              deps = Dependencies(self._classes_dir)
              deps.load(depfile)
              self._deps.merge(deps)

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')

          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way

          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        all_processors = set()
        for target in java_targets:
          if is_apt(target) and target.processors:
            all_processors.update(target.processors)
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              all_processors.add(processor.strip())
        self.write_processor_info(processor_info_file, all_processors)

  def execute_single_compilation(self, vt, cp):
    depfile = self.create_depfile_path(vt.targets)

    self.merge_depfile(vt)  # Get what we can from previous builds.

    sources_by_target, fingerprint = self.calculate_sources(vt.targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                              % '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

      self.split_depfile(vt)

      all_artifact_files = [depfile]

      if self._artifact_cache and self.context.options.write_to_artifact_cache:
        deps = Dependencies(self._classes_dir)
        deps.load(depfile)
        vts_artifactfile_pairs = []
        for single_vt in vt.versioned_targets:
          per_target_depfile = self.create_depfile_path([single_vt.target])
          per_target_artifact_files = [per_target_depfile]
          for _, classes_by_source in deps.findclasses([single_vt.target]).items():
            for _, classes in classes_by_source.items():
              classfile_paths = [os.path.join(self._classes_dir, cls) for cls in classes]
              per_target_artifact_files.extend(classfile_paths)
              all_artifact_files.extend(classfile_paths)
          vts_artifactfile_pairs.append((single_vt, per_target_artifact_files))

        vts_artifactfile_pairs.append((vt, all_artifact_files))
        self.update_artifact_cache(vts_artifactfile_pairs)

  def create_depfile_path(self, targets):
    compilation_id = Target.maybe_readable_identify(targets)
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  def calculate_sources(self, targets):
    sources = defaultdict(set)

    def collect_sources(target):
      # Materialize as a list so the emptiness check below is meaningful
      # (a generator expression would always be truthy).
      src = [os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java')]
      if src:
        sources[target].update(src)

    for target in targets:
      collect_sources(target)
    return sources, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = self.profile_classpath(self._jmake_profile)

    opts = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]

    compiler_classpath = self.profile_classpath(self._compiler_profile)
    opts.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])
    opts.extend(map(lambda arg: '-C%s' % arg, self._javac_opts))

    opts.extend(self._opts)
    return self.runjava_indivisible(_JMAKE_MAIN, classpath=jmake_classpath, opts=opts,
                                    args=sources, jvmargs=self._jvm_args, workunit_name='jmake')

  def check_artifact_cache(self, vts):
    # Special handling for java artifacts.
    cached_vts, uncached_vts = Task.check_artifact_cache(self, vts)
    if cached_vts:
      with self.context.new_workunit('split'):
        for vt in cached_vts:
          self.split_depfile(vt)
    return cached_vts, uncached_vts

  def split_depfile(self, vt):
    depfile = self.create_depfile_path(vt.targets)
    if len(vt.targets) <= 1 or not os.path.exists(depfile) or self.dry_run:
      return

    deps = Dependencies(self._classes_dir)
    deps.load(depfile)

    classes_by_source_by_target = deps.findclasses(vt.targets)
    for target in vt.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)

  # Merges individual target depfiles into a single one for all those targets.
  # Note that the merged depfile may be incomplete (e.g., if the previous build was aborted).
  # TODO: Is this even necessary? JMake will stomp these anyway on success.
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)
    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)
    dst_deps.save(dst_depfile)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
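# write_processor_info (above) emits one annotation processor class name per line, and the
# readers elsewhere in these snippets consume it line-by-line with .strip(). A hypothetical
# example of the file's contents (the class names are made up, and the _PROCESSOR_INFO_FILE
# path constant is defined elsewhere in the module and not shown in these snippets):
#
#   com.example.processors.AutoValueProcessor
#   com.example.processors.ThriftCodegenProcessor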
  def execute_single_compilation(self, versioned_targets, cp):
    compilation_id = Target.maybe_readable_identify(versioned_targets.targets)

    # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
    # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

    if self._flatten:
      # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
      # allows us to build different targets in different invocations without losing dependency information
      # from any of them.
      depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
    else:
      # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
      # compilation will read in the entire depfile, add its stuff to it and write it out again).
      depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

    if not versioned_targets.valid:
      self.context.log.info('Compiling targets %s' % str(versioned_targets.targets))
      sources_by_target, processors, fingerprint = self.calculate_sources(versioned_targets.targets)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          # Create an empty depfile, since downstream code may assume that one exists.
          touch(depfile)
          self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                                % '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, fingerprint, depfile)
          if result != 0:
            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        if processors:
          # Produce a monolithic apt processor service info file for further compilation rounds
          # and the unit test classpath.
          processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
          if os.path.exists(processor_info_file):
            with safe_open(processor_info_file, 'r') as f:
              for processor in f:
                processors.add(processor.strip())
          self.write_processor_info(processor_info_file, processors)

    # Read in the deps created either just now or by a previous compiler run on these targets.
    deps = Dependencies(self._classes_dir)
    deps.load(depfile)
    self._deps.merge(deps)
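# A small sketch of the depfile path selection above, as a standalone toy
# function (not the task's API): flat mode funnels every compilation into one
# well-known file, while non-flat mode keys each depfile off its compilation id.
import os

def depfile_path(depfile_dir, flatten, compilation_id):
  if flatten:
    return os.path.join(depfile_dir, 'dependencies.flat')
  return os.path.join(depfile_dir, compilation_id) + '.dependencies'

assert depfile_path('/tmp/depfiles', True, 'abc123') == '/tmp/depfiles/dependencies.flat'
assert depfile_path('/tmp/depfiles', False, 'abc123') == '/tmp/depfiles/abc123.dependencies'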
class JavaCompile(NailgunTask):
  @staticmethod
  def _has_java_sources(target):
    return is_apt(target) or isinstance(target, JavaLibrary) or isinstance(target, JavaTests)

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("flatten"), mkflag("flatten", negate=True),
                            dest="java_compile_flatten",
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code for all dependencies in a "
                                 "single compilation.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    self._flatten = (
      context.options.java_compile_flatten
      if context.options.java_compile_flatten is not None
      else context.config.getbool('java-compile', 'default_to_flatten'))

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._args = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    if context.options.java_compile_warnings:
      self._args.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

  def product_type(self):
    return 'classes'

  def invalidate_for(self):
    return self._flatten

  def execute(self, targets):
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependants=True) as invalidated:
        if self._flatten:
          # The deps go to a single well-known file, so we need only pass in the invalid targets here.
          self.execute_single_compilation(invalidated.combined_invalid_versioned_targets(), cp)
        else:
          # We must pass all targets, even valid ones, to execute_single_compilation(), so it can
          # track the per-target deps correctly.
          for vt in invalidated.all_versioned_targets():
            self.execute_single_compilation(vt, cp)
            invalidated.update_versioned_target(vt)

      if self.context.products.isrequired('classes'):
        genmap = self.context.products.get('classes')

        # Map generated classes to the owning targets and sources.
        for target, classes_by_source in self._deps.findclasses(java_targets).items():
          for source, classes in classes_by_source.items():
            genmap.add(source, self._classes_dir, classes)
            genmap.add(target, self._classes_dir, classes)

        # TODO(John Sirois): Map target.resources in the same way

        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        for target in java_targets:
          if is_apt(target) and target.processors:
            basedir = os.path.join(self._resources_dir, target.id)
            processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(processor_info_file, target.processors)
            genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

  def execute_single_compilation(self, versioned_targets, cp):
    compilation_id = Target.maybe_readable_identify(versioned_targets.targets)

    # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
    # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

    if self._flatten:
      # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
      # allows us to build different targets in different invocations without losing dependency information
      # from any of them.
      depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
    else:
      # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
      # compilation will read in the entire depfile, add its stuff to it and write it out again).
      depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

    if not versioned_targets.valid:
      self.context.log.info('Compiling targets %s' % str(versioned_targets.targets))
      sources_by_target, processors, fingerprint = self.calculate_sources(versioned_targets.targets)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          # Create an empty depfile, since downstream code may assume that one exists.
          touch(depfile)
          self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                                % '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, fingerprint, depfile)
          if result != 0:
            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        if processors:
          # Produce a monolithic apt processor service info file for further compilation rounds
          # and the unit test classpath.
          processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
          if os.path.exists(processor_info_file):
            with safe_open(processor_info_file, 'r') as f:
              for processor in f:
                processors.add(processor.strip())
          self.write_processor_info(processor_info_file, processors)

    # Read in the deps created either just now or by a previous compiler run on these targets.
    deps = Dependencies(self._classes_dir)
    deps.load(depfile)
    self._deps.merge(deps)

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    processors = set()

    def collect_sources(target):
      # Materialized as a list so the emptiness check below is meaningful.
      src = [os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java')]
      if src:
        sources[target].update(src)
        if is_apt(target) and target.processors:
          processors.update(target.processors)

    for target in targets:
      collect_sources(target)
    return sources, processors, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = nailgun_profile_classpath(self, self._jmake_profile)
    args = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]
    compiler_classpath = nailgun_profile_classpath(self, self._compiler_profile)
    args.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])
    args.extend(self._args)
    args.extend(sources)
    log.debug('Executing: %s %s' % (_JMAKE_MAIN, ' '.join(args)))
    return self.runjava(_JMAKE_MAIN, classpath=jmake_classpath, args=args, jvmargs=self._jvm_args)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
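# A sketch of the jmake argument list that compile() assembles above, pulled out
# as a pure function with hypothetical paths; the flag names and the ':' join of
# classpath entries mirror the code, everything else is illustrative.
import os

def jmake_args(classpath, classes_dir, fingerprint, compiler_classpath, depfile, extra_args):
  args = [
    '-classpath', ':'.join(classpath),
    '-d', classes_dir,
    '-pdb', os.path.join(classes_dir, '%s.dependencies.pdb' % fingerprint),
    '-jcpath', ':'.join(compiler_classpath),
    '-jcmainclass', 'com.twitter.common.tools.Compiler',
    '-C-Tdependencyfile', '-C%s' % depfile,
  ]
  args.extend(extra_args)
  return args

args = jmake_args(['a.jar', 'b.jar'], '/tmp/classes', 'fp', ['jc.jar'],
                  '/tmp/depfiles/x.dependencies', [])
assert args[1] == 'a.jar:b.jar'              # classpath entries joined with ':'
assert args[5] == '/tmp/classes/fp.dependencies.pdb'  # pdb keyed off the fingerprint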
  def execute_single_compilation(self, versioned_target_set, cp, upstream_analysis_caches):
    """Execute a single compilation, updating upstream_analysis_caches if needed."""
    if self._flatten:
      compilation_id = 'flat'
      output_dir = self._flat_classes_dir
    else:
      compilation_id = Target.maybe_readable_identify(versioned_target_set.targets)
      # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
      # analysis caches of previous compilations. We then copy the results out to the real output dir.
      output_dir = os.path.join(self._incremental_classes_dir, compilation_id)

    depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'
    analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id) + '.analysis_cache'

    safe_mkdir(output_dir)

    if not versioned_target_set.valid:
      with self.check_artifact_cache(versioned_target_set,
                                     build_artifacts=[output_dir, depfile, analysis_cache],
                                     artifact_root=self._workdir) as needs_building:
        if needs_building:
          self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
          sources_by_target = self.calculate_sources(versioned_target_set.targets)
          if sources_by_target:
            sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
            if not sources:
              # Create an empty depfile, since downstream code may assume that one exists.
              touch(depfile)
              self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s'
                                    % '\n  '.join(str(t) for t in sources_by_target.keys()))
            else:
              classpath = [jar for conf, jar in cp if conf in self._confs]
              result = self.compile(classpath, sources, output_dir, analysis_cache,
                                    upstream_analysis_caches, depfile)
              if result != 0:
                raise TaskError('%s returned %d' % (self._main, result))

    # Note that the following post-processing steps must happen even for valid targets.

    # Read in the deps created either just now or by a previous compiler run on these targets.
    if self.context.products.isrequired('classes'):
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(output_dir)
      deps.load(depfile)

      genmap = self.context.products.get('classes')

      for target, classes_by_source in deps.findclasses(versioned_target_set.targets).items():
        for source, classes in classes_by_source.items():
          genmap.add(source, output_dir, classes)
          genmap.add(target, output_dir, classes)

      # TODO(John Sirois): Map target.resources in the same way

      # Create and Map scala plugin info files to the owning targets.
      for target in versioned_target_set.targets:
        if is_scalac_plugin(target) and target.classname:
          basedir = self.write_plugin_info(target)
          genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

    # Update the upstream analysis map.
    analysis_cache_parts = os.path.split(analysis_cache)
    if not upstream_analysis_caches.has(output_dir):
      # A previous chunk might have already updated this. It is certainly possible for a later chunk to
      # independently depend on some target that a previous chunk already built.
      upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [analysis_cache_parts[1]])

    # Update the classpath.
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, output_dir))
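# A toy sketch of the upstream_analysis_caches bookkeeping above: each output
# dir maps to the (directory, [filename]) split of its zinc analysis cache, and
# a has() guard keeps a later chunk from re-registering a dir a previous chunk
# already built. The UpstreamCaches class here is a hypothetical stand-in, not
# the real product-mapping type.
import os

class UpstreamCaches(object):
  def __init__(self):
    self._caches = {}

  def has(self, output_dir):
    return output_dir in self._caches

  def add(self, output_dir, basedir, filenames):
    self._caches[output_dir] = (basedir, filenames)

caches = UpstreamCaches()
analysis_cache = '/tmp/analysis/abc123.analysis_cache'
output_dir = '/tmp/classes/abc123'
if not caches.has(output_dir):
  basedir, filename = os.path.split(analysis_cache)
  caches.add(output_dir, basedir, [filename])
assert caches.has(output_dir)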
class JavaCompile(NailgunTask):
  @staticmethod
  def _has_java_sources(target):
    return is_apt(target) or isinstance(target, JavaLibrary) or isinstance(target, JavaTests)

  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("partition-size-hint"), dest="java_compile_partition_size_hint",
                            action="store", type="int", default=-1,
                            help="Roughly how many source files to attempt to compile together. "
                                 "Set to a large number to compile all sources together. Set this "
                                 "to 0 to compile target-by-target. Default is set in pants.ini.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    self._partition_size_hint = (
      context.options.java_compile_partition_size_hint
      if context.options.java_compile_partition_size_hint != -1
      else context.config.getint('java-compile', 'partition_size_hint'))

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._args = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    if context.options.java_compile_warnings:
      self._args.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._args.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

  def product_type(self):
    return 'classes'

  def can_dry_run(self):
    return True

  def execute(self, targets):
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependants=True,
                            partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.all_vts:
          if vt.valid:  # Don't compile, just post-process.
            self.post_process(vt)
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          self.execute_single_compilation(vt, cp)
          if not self.dry_run:
            vt.update()

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')

          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way

          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, target.id)
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

  def execute_single_compilation(self, versioned_targets, cp):
    # TODO: Use the artifact cache.
    depfile = self.create_depfile_path(versioned_targets.targets)

    if not versioned_targets.valid:
      self.merge_depfile(versioned_targets)  # Get what we can from previous builds.
      self.context.log.info('Compiling targets %s' % str(versioned_targets.targets))
      sources_by_target, processors, fingerprint = self.calculate_sources(versioned_targets.targets)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                                % '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, fingerprint, depfile)
          if result != 0:
            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        if processors and not self.dry_run:
          # Produce a monolithic apt processor service info file for further compilation rounds
          # and the unit test classpath.
          processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
          if os.path.exists(processor_info_file):
            with safe_open(processor_info_file, 'r') as f:
              for processor in f:
                processors.add(processor.strip())
          self.write_processor_info(processor_info_file, processors)

    self.post_process(versioned_targets)

  # Post-processing steps that must happen even for valid targets.
  def post_process(self, versioned_targets):
    depfile = self.create_depfile_path(versioned_targets.targets)
    if not self.dry_run and os.path.exists(depfile):
      # Read in the deps created either just now or by a previous compiler run on these targets.
      deps = Dependencies(self._classes_dir)
      deps.load(depfile)
      self.split_depfile(deps, versioned_targets)
      self._deps.merge(deps)

  def create_depfile_path(self, targets):
    compilation_id = Target.maybe_readable_identify(targets)
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  def calculate_sources(self, targets):
    sources = defaultdict(set)
    processors = set()

    def collect_sources(target):
      # Materialized as a list so the emptiness check below is meaningful.
      src = [os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java')]
      if src:
        sources[target].update(src)
        if is_apt(target) and target.processors:
          processors.update(target.processors)

    for target in targets:
      collect_sources(target)
    return sources, processors, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = nailgun_profile_classpath(self, self._jmake_profile)
    args = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]
    compiler_classpath = nailgun_profile_classpath(self, self._compiler_profile)
    args.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])
    args.extend(self._args)
    args.extend(sources)
    log.debug('Executing: %s %s' % (_JMAKE_MAIN, ' '.join(args)))
    return self.runjava(_JMAKE_MAIN, classpath=jmake_classpath, args=args, jvmargs=self._jvm_args)

  def split_depfile(self, deps, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    classes_by_source_by_target = deps.findclasses(versioned_target_set.targets)
    for target in versioned_target_set.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)

  # Merges individual target depfiles into a single one for all those targets.
  # Note that the merged depfile may be incomplete (e.g., if the previous build was aborted).
  # TODO: Is this even necessary? JMake will stomp these anyway on success.
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)
    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)
    dst_deps.save(dst_depfile)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
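# A compact sketch of the split/merge depfile round trip above, using a toy
# stand-in for the Dependencies class (a dict of source -> classes); the real
# class also persists to and loads from disk. All names here are hypothetical.
class ToyDeps(object):
  def __init__(self):
    self.by_source = {}

  def add(self, src, classes):
    self.by_source.setdefault(src, set()).update(classes)

  def merge(self, other):
    for src, classes in other.by_source.items():
      self.add(src, classes)

# 'Split': carve a combined mapping into per-target mappings...
combined = ToyDeps()
combined.add('a/Foo.java', ['a/Foo.class'])
combined.add('b/Bar.java', ['b/Bar.class'])
per_target = {}
for target, srcs in {'a': ['a/Foo.java'], 'b': ['b/Bar.java']}.items():
  deps = ToyDeps()
  for src in srcs:
    deps.add(src, combined.by_source[src])
  per_target[target] = deps

# ...and 'merge': recombine them into one best-effort warm-start mapping.
merged = ToyDeps()
for deps in per_target.values():
  merged.merge(deps)
assert merged.by_source == combined.by_source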
class JavaCompile(NailgunTask):
  @classmethod
  def setup_parser(cls, option_group, args, mkflag):
    NailgunTask.setup_parser(option_group, args, mkflag)

    option_group.add_option(mkflag("warnings"), mkflag("warnings", negate=True),
                            dest="java_compile_warnings", default=True,
                            action="callback", callback=mkflag.set_bool,
                            help="[%default] Compile java code with all configured warnings "
                                 "enabled.")

    option_group.add_option(mkflag("args"), dest="java_compile_args", action="append",
                            help="Pass these extra args to javac.")

    option_group.add_option(mkflag("partition-size-hint"), dest="java_compile_partition_size_hint",
                            action="store", type="int", default=-1,
                            help="Roughly how many source files to attempt to compile together. Set"
                                 " to a large number to compile all sources together. Set this to 0"
                                 " to compile target-by-target. Default is set in pants.ini.")

  def __init__(self, context):
    NailgunTask.__init__(self, context, workdir=context.config.get('java-compile', 'nailgun_dir'))

    if context.options.java_compile_partition_size_hint != -1:
      self._partition_size_hint = context.options.java_compile_partition_size_hint
    else:
      self._partition_size_hint = context.config.getint('java-compile', 'partition_size_hint',
                                                        default=1000)

    workdir = context.config.get('java-compile', 'workdir')
    self._classes_dir = os.path.join(workdir, 'classes')
    self._resources_dir = os.path.join(workdir, 'resources')
    self._depfile_dir = os.path.join(workdir, 'depfiles')
    self._deps = Dependencies(self._classes_dir)

    self._jmake_profile = context.config.get('java-compile', 'jmake-profile')
    self._compiler_profile = context.config.get('java-compile', 'compiler-profile')

    self._opts = context.config.getlist('java-compile', 'args')
    self._jvm_args = context.config.getlist('java-compile', 'jvm_args')

    self._javac_opts = []
    if context.options.java_compile_args:
      for arg in context.options.java_compile_args:
        self._javac_opts.extend(shlex.split(arg))
    else:
      self._javac_opts.extend(context.config.getlist('java-compile', 'javac_args', default=[]))

    if context.options.java_compile_warnings:
      self._opts.extend(context.config.getlist('java-compile', 'warning_args'))
    else:
      self._opts.extend(context.config.getlist('java-compile', 'no_warning_args'))

    self._confs = context.config.getlist('java-compile', 'confs')

    self.context.products.require_data('exclusives_groups')

    # The artifact cache to read from/write to.
    artifact_cache_spec = context.config.getlist('java-compile', 'artifact_caches', default=[])
    self.setup_artifact_cache(artifact_cache_spec)

  def product_type(self):
    return 'classes'

  def can_dry_run(self):
    return True

  def execute(self, targets):
    java_targets = filter(_is_java, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      egroups = self.context.products.get_data('exclusives_groups')
      group_id = egroups.get_group_key_for_target(java_targets[0])
      for conf in self._confs:
        egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])
        egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])

      with self.invalidated(java_targets, invalidate_dependents=True,
                            partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          exclusives_classpath = egroups.get_classpath_for_group(group_id)
          self.execute_single_compilation(vt, exclusives_classpath)
          if not self.dry_run:
            vt.update()

        for vt in invalidation_check.all_vts:
          depfile = self.create_depfile_path(vt.targets)
          if not self.dry_run and os.path.exists(depfile):
            # Read in the deps created either just now or by a previous run on these targets.
            deps = Dependencies(self._classes_dir)
            deps.load(depfile)
            self._deps.merge(deps)

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')

          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way

          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        all_processors = set()
        for target in java_targets:
          if is_apt(target) and target.processors:
            all_processors.update(target.processors)
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              all_processors.add(processor.strip())
        self.write_processor_info(processor_info_file, all_processors)

  def execute_single_compilation(self, vt, cp):
    depfile = self.create_depfile_path(vt.targets)

    self.merge_depfile(vt)  # Get what we can from previous builds.
    sources_by_target, fingerprint = self.calculate_sources(vt.targets)
    if sources_by_target:
      sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
      if not sources:
        self.context.log.warn('Skipping java compile for targets with no sources:\n  %s'
                              % '\n  '.join(str(t) for t in sources_by_target.keys()))
      else:
        classpath = [jar for conf, jar in cp if conf in self._confs]
        result = self.compile(classpath, sources, fingerprint, depfile)
        if result != 0:
          default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
          raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        self.split_depfile(vt)

        all_artifact_files = [depfile]
        if self._artifact_cache and self.context.options.write_to_artifact_cache:
          deps = Dependencies(self._classes_dir)
          deps.load(depfile)
          vts_artifactfile_pairs = []
          for single_vt in vt.versioned_targets:
            per_target_depfile = self.create_depfile_path([single_vt.target])
            per_target_artifact_files = [per_target_depfile]
            for _, classes_by_source in deps.findclasses([single_vt.target]).items():
              for _, classes in classes_by_source.items():
                classfile_paths = [os.path.join(self._classes_dir, cls) for cls in classes]
                per_target_artifact_files.extend(classfile_paths)
                all_artifact_files.extend(classfile_paths)
            vts_artifactfile_pairs.append((single_vt, per_target_artifact_files))
          vts_artifactfile_pairs.append((vt, all_artifact_files))
          self.update_artifact_cache(vts_artifactfile_pairs)

  def create_depfile_path(self, targets):
    compilation_id = Target.maybe_readable_identify(targets)
    return os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

  def calculate_sources(self, targets):
    sources = defaultdict(set)

    def collect_sources(target):
      # Materialized as a list so the emptiness check below is meaningful.
      src = [os.path.join(target.target_base, source)
             for source in target.sources if source.endswith('.java')]
      if src:
        sources[target].update(src)

    for target in targets:
      collect_sources(target)
    return sources, Target.identify(targets)

  def compile(self, classpath, sources, fingerprint, depfile):
    jmake_classpath = self.profile_classpath(self._jmake_profile)
    opts = [
      '-classpath', ':'.join(classpath),
      '-d', self._classes_dir,
      '-pdb', os.path.join(self._classes_dir, '%s.dependencies.pdb' % fingerprint),
    ]
    compiler_classpath = self.profile_classpath(self._compiler_profile)
    opts.extend([
      '-jcpath', ':'.join(compiler_classpath),
      '-jcmainclass', 'com.twitter.common.tools.Compiler',
      '-C-Tdependencyfile', '-C%s' % depfile,
    ])
    opts.extend(map(lambda arg: '-C%s' % arg, self._javac_opts))
    opts.extend(self._opts)
    return self.runjava_indivisible(_JMAKE_MAIN, classpath=jmake_classpath, opts=opts,
                                    args=sources, jvmargs=self._jvm_args, workunit_name='jmake',
                                    workunit_labels=[WorkUnit.COMPILER])

  def check_artifact_cache(self, vts):
    # Special handling for java artifacts.
    cached_vts, uncached_vts = Task.check_artifact_cache(self, vts)
    if cached_vts:
      with self.context.new_workunit('split'):
        for vt in cached_vts:
          self.split_depfile(vt)
    return cached_vts, uncached_vts

  def split_depfile(self, vt):
    depfile = self.create_depfile_path(vt.targets)
    if len(vt.targets) <= 1 or not os.path.exists(depfile) or self.dry_run:
      return

    deps = Dependencies(self._classes_dir)
    deps.load(depfile)

    classes_by_source_by_target = deps.findclasses(vt.targets)
    for target in vt.targets:
      classes_by_source = classes_by_source_by_target.get(target, {})
      dst_depfile = self.create_depfile_path([target])
      dst_deps = Dependencies(self._classes_dir)
      for source, classes in classes_by_source.items():
        src = os.path.join(target.target_base, source)
        dst_deps.add(src, classes)
      dst_deps.save(dst_depfile)

  # Merges individual target depfiles into a single one for all those targets.
  # Note that the merged depfile may be incomplete (e.g., if the previous build was aborted).
  # TODO: Is this even necessary? JMake will stomp these anyway on success.
  def merge_depfile(self, versioned_target_set):
    if len(versioned_target_set.targets) <= 1:
      return

    dst_depfile = self.create_depfile_path(versioned_target_set.targets)
    dst_deps = Dependencies(self._classes_dir)
    for target in versioned_target_set.targets:
      src_depfile = self.create_depfile_path([target])
      if os.path.exists(src_depfile):
        src_deps = Dependencies(self._classes_dir)
        src_deps.load(src_depfile)
        dst_deps.merge(src_deps)
    dst_deps.save(dst_depfile)

  def write_processor_info(self, processor_info_file, processors):
    with safe_open(processor_info_file, 'w') as f:
      for processor in processors:
        f.write('%s\n' % processor)
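# A minimal sketch of the (conf, path) classpath filtering that
# execute_single_compilation() applies: only entries whose conf is active make
# it onto the compile classpath. Toy data; the 'default' and 'test' confs and
# paths are illustrative.
cp = [('default', '/tmp/classes'), ('test', '/tmp/test-classes'), ('default', 'guava.jar')]
confs = ['default']
classpath = [path for conf, path in cp if conf in confs]
assert classpath == ['/tmp/classes', 'guava.jar']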
  def execute(self, targets):
    java_targets = filter(_is_java, targets)
    if java_targets:
      safe_mkdir(self._classes_dir)
      safe_mkdir(self._depfile_dir)

      with self.context.state('classpath', []) as cp:
        for conf in self._confs:
          cp.insert(0, (conf, self._resources_dir))
          cp.insert(0, (conf, self._classes_dir))

      with self.invalidated(java_targets, invalidate_dependents=True,
                            partition_size_hint=self._partition_size_hint) as invalidation_check:
        for vt in invalidation_check.invalid_vts_partitioned:
          # Compile, using partitions for efficiency.
          self.execute_single_compilation(vt, cp)
          if not self.dry_run:
            vt.update()

        for vt in invalidation_check.all_vts:
          depfile = self.create_depfile_path(vt.targets)
          if not self.dry_run and os.path.exists(depfile):
            # Read in the deps created either just now or by a previous run on these targets.
            deps = Dependencies(self._classes_dir)
            deps.load(depfile)
            self._deps.merge(deps)

      if not self.dry_run:
        if self.context.products.isrequired('classes'):
          genmap = self.context.products.get('classes')

          # Map generated classes to the owning targets and sources.
          for target, classes_by_source in self._deps.findclasses(java_targets).items():
            for source, classes in classes_by_source.items():
              genmap.add(source, self._classes_dir, classes)
              genmap.add(target, self._classes_dir, classes)

          # TODO(John Sirois): Map target.resources in the same way

          # 'Map' (rewrite) annotation processor service info files to the owning targets.
          for target in java_targets:
            if is_apt(target) and target.processors:
              basedir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
              processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
              self.write_processor_info(processor_info_file, target.processors)
              genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

        # Produce a monolithic apt processor service info file for further compilation rounds
        # and the unit test classpath.
        all_processors = set()
        for target in java_targets:
          if is_apt(target) and target.processors:
            all_processors.update(target.processors)
        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
        if os.path.exists(processor_info_file):
          with safe_open(processor_info_file, 'r') as f:
            for processor in f:
              all_processors.add(processor.strip())
        self.write_processor_info(processor_info_file, all_processors)
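# A tiny sketch of the cp.insert(0, ...) ordering in execute() above: for each
# conf, the resources dir is inserted first and the classes dir second, so the
# classes dir ends up ahead of the resources dir at the front of the classpath.
# Paths here are illustrative.
cp = [('default', 'ivy-resolved.jar')]
for conf in ['default']:
  cp.insert(0, (conf, '/workdir/resources'))
  cp.insert(0, (conf, '/workdir/classes'))
assert cp == [('default', '/workdir/classes'),
              ('default', '/workdir/resources'),
              ('default', 'ivy-resolved.jar')]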