def execute(self, targets):
    """Compile the java targets among ``targets``, using exclusives-group classpaths.

    Registers the classes and resources output dirs on the group classpath for
    downstream tasks, compiles invalid target partitions, merges per-partition
    depfiles into self._deps, and (unless a dry run) publishes class products and
    annotation processor service info files.
    """
    java_targets = filter(_is_java, targets)  # NOTE: relies on Python 2 filter() returning a list
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)

        # All java targets in one compile must share a single exclusives group;
        # the group key of the first target stands in for the whole set.
        egroups = self.context.products.get_data('exclusives_groups')
        group_id = egroups.get_group_key_for_target(java_targets[0])

        # Expose our output dirs on the group classpath for every configuration.
        for conf in self._confs:
            egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])
            egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])

        with self.invalidated(java_targets, invalidate_dependents=True,
                              partition_size_hint=self._partition_size_hint) as invalidation_check:
            for vt in invalidation_check.invalid_vts_partitioned:
                # Compile, using partitions for efficiency.
                # Classpath is re-fetched per partition so earlier compiles in this
                # run are visible to later ones.
                exclusives_classpath = egroups.get_classpath_for_group(group_id)
                self.execute_single_compilation(vt, exclusives_classpath)
                if not self.dry_run:
                    vt.update()

            for vt in invalidation_check.all_vts:
                depfile = self.create_depfile_path(vt.targets)
                if not self.dry_run and os.path.exists(depfile):
                    # Read in the deps created either just now or by a previous run on these targets.
                    deps = Dependencies(self._classes_dir)
                    deps.load(depfile)
                    self._deps.merge(deps)

        if not self.dry_run:
            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                for target, classes_by_source in self._deps.findclasses(java_targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # 'Map' (rewrite) annotation processor service info files to the owning targets.
                for target in java_targets:
                    if is_apt(target) and target.processors:
                        basedir = os.path.join(self._resources_dir,
                                               Target.maybe_readable_identify([target]))
                        processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                        self.write_processor_info(processor_info_file, target.processors)
                        genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])

            # Produce a monolithic apt processor service info file for further compilation rounds
            # and the unit test classpath.
            all_processors = set()
            for target in java_targets:
                if is_apt(target) and target.processors:
                    all_processors.update(target.processors)
            processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
            if os.path.exists(processor_info_file):
                # Preserve processors recorded by earlier compilation rounds.
                with safe_open(processor_info_file, 'r') as f:
                    for processor in f:
                        all_processors.add(processor.strip())
            self.write_processor_info(processor_info_file, all_processors)
def extract_target(java_targets, is_transitive, name=None):
    """Fold ``java_targets`` into a single meta library target suitable for IDE config.

    Raises TypeError when the merged target is neither java nor scala.
    """
    combined = bang.extract_target(java_targets, name)
    internal_deps, jar_deps = _extract_target(combined, is_transitive,
                                              lambda target: is_apt(target))

    # TODO(John Sirois): make an empty source set work in ant/compile.xml
    placeholder_sources = ['__no_source__']

    dependencies = OrderedSet()
    dependencies.update(internal_deps)
    dependencies.update(jar_deps)

    if is_java(combined):
        library_type = JavaLibrary
    elif is_scala(combined):
        library_type = ScalaLibrary
    else:
        raise TypeError("Cannot generate IDE configuration for targets: %s" % java_targets)

    return library_type('ide', placeholder_sources,
                        dependencies=dependencies,
                        excludes=combined.excludes,
                        is_meta=True)
def execute(self, targets):
    """Compile java targets in reverse topological order, flat or per-target.

    Prepends the classes and resources dirs to the shared 'classpath' state so
    downstream tasks see our outputs, then publishes class products when required.
    """
    # Reverse topo-sort so dependencies compile before their dependents.
    java_targets = filter(JavaCompile._is_java, reversed(InternalTarget.sort_targets(targets)))
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))
            if not self._flatten:
                # One compilation per target keeps per-target deps accurate.
                for target in java_targets:
                    self.execute_single_compilation([target], cp)
            else:
                self.execute_single_compilation(java_targets, cp)

        if self.context.products.isrequired('classes'):
            genmap = self.context.products.get('classes')

            # Map generated classes to the owning targets and sources.
            for target, classes_by_source in self._deps.findclasses(java_targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, self._classes_dir, classes)
                    genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # 'Map' (rewrite) annotation processor service info files to the owning targets.
            for target in java_targets:
                if is_apt(target) and target.processors:
                    basedir = os.path.join(self._resources_dir, target.id)
                    processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                    self.write_processor_info(processor_info_file, target.processors)
                    genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def collect_sources(target):
    """Accumulate target's .java source paths into the enclosing ``sources`` map,
    and its annotation processors into the enclosing ``processors`` set.
    """
    # Materialize the paths into a list: the original generator expression was
    # ALWAYS truthy (even when it would yield nothing), so the emptiness guard
    # below could never skip targets with no java sources.
    src = [os.path.join(target.target_base, source)
           for source in target.sources
           if source.endswith('.java')]
    if src:
        sources[target].update(src)
        if is_apt(target) and target.processors:
            processors.update(target.processors)
def execute(self, targets):
    """Compile java targets via jmake, recursing per-target when not flattened.

    Writes a monolithic apt processor service info file and, when required,
    publishes class products read from the jmake dependencies file.
    """
    if not self._flatten and len(targets) > 1:
        # Unflattened mode: recurse with singleton target lists in reverse
        # topological order so dependencies compile first.
        topologically_sorted_targets = filter(JavaCompile._is_java,
                                              reversed(InternalTarget.sort_targets(targets)))
        for target in topologically_sorted_targets:
            self.execute([target])
        return

    self.context.log.info('Compiling targets %s' % str(targets))

    java_targets = filter(JavaCompile._is_java, targets)
    if java_targets:
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.changed(java_targets, invalidate_dependants=True) as changed:
                sources_by_target, processors, fingerprint = self.calculate_sources(changed)
                if sources_by_target:
                    # Union all source sets across the changed targets.
                    sources = reduce(lambda all, sources: all.union(sources),
                                     sources_by_target.values())
                    if not sources:
                        self.context.log.warn(
                            'Skipping java compile for targets with no sources:\n %s'
                            % '\n '.join(str(t) for t in sources_by_target.keys()))
                    else:
                        classpath = [jar for conf, jar in cp if conf in self._confs]
                        result = self.compile(classpath, sources, fingerprint)
                        if result != 0:
                            # Translate jmake's exit code into a friendly error if known.
                            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN,
                                                                                     result)
                            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

                    if processors:
                        # Produce a monolithic apt processor service info file for further compilation rounds
                        # and the unit test classpath.
                        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
                        if os.path.exists(processor_info_file):
                            with safe_open(processor_info_file, 'r') as f:
                                for processor in f:
                                    processors.add(processor.strip())
                        self.write_processor_info(processor_info_file, processors)

        if self.context.products.isrequired('classes'):
            genmap = self.context.products.get('classes')

            # Map generated classes to the owning targets and sources.
            dependencies = Dependencies(self._classes_dir, self._dependencies_file)
            for target, classes_by_source in dependencies.findclasses(targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, self._classes_dir, classes)
                    genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # 'Map' (rewrite) annotation processor service info files to the owning targets.
            for target in targets:
                if is_apt(target) and target.processors:
                    basedir = os.path.join(self._resources_dir, target.id)
                    processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                    self.write_processor_info(processor_info_file, target.processors)
                    genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def is_cp(target):
    """True when target's classes must be pre-built onto the IDE classpath."""
    # Some IDEs need annotation processors pre-compiled, others are smart enough to detect and
    # proceed in 2 compile rounds
    needs_precompile = is_codegen(target) or is_apt(target)
    return (needs_precompile
            or (self.skip_java and is_java(target))
            or (self.skip_scala and is_scala(target))
            or (self.intransitive and target not in self.context.target_roots))
def is_cp(target):
    """True when target's classes must be pre-built onto the IDE classpath."""
    # Some IDEs need annotation processors pre-compiled, others are smart enough to detect and
    # proceed in 2 compile rounds
    needs_precompile = target.is_codegen or is_apt(target)
    return (needs_precompile
            or (self.skip_java and is_java(target))
            or (self.skip_scala and is_scala(target)))
def is_cp(target):
    """Decide whether target's classes belong on the pre-built IDE classpath."""
    # Some IDEs need annotation processors pre-compiled, others are smart enough to detect and
    # proceed in 2 compile rounds
    precompiled = is_codegen(target) or is_apt(target)
    skipped_language = ((self.skip_java and is_java(target))
                        or (self.skip_scala and is_scala(target)))
    return (precompiled
            or skipped_language
            or (self.intransitive and target not in self.context.target_roots))
def execute(self, targets):
    """Compile java targets within their exclusives group, with artifact caching.

    Adds output dirs to the group classpath, compiles invalid partitions (writing
    to the artifact cache when configured), publishes class products from the
    depfile, and refreshes the monolithic apt processor service info file.
    """
    java_targets = filter(lambda t: has_sources(t, '.java'), targets)
    if not java_targets:
        return

    # Get the exclusives group for the targets to compile.
    # Group guarantees that they'll be a single exclusives key for them.
    egroups = self.context.products.get_data('exclusives_groups')
    group_id = egroups.get_group_key_for_target(java_targets[0])

    # Add classes and resource dirs to the classpath for us and for downstream tasks.
    for conf in self._confs:
        egroups.update_compatible_classpaths(group_id, [(conf, self._classes_dir)])
        egroups.update_compatible_classpaths(group_id, [(conf, self._resources_dir)])

    # Get the classpath generated by upstream JVM tasks (including previous calls to execute()).
    cp = egroups.get_classpath_for_group(group_id)

    with self.invalidated(java_targets, invalidate_dependents=True,
                          partition_size_hint=self._partition_size_hint) as invalidation_check:
        if not self.dry_run:
            for vts in invalidation_check.invalid_vts_partitioned:
                # Compile, using partitions for efficiency.
                sources_by_target = self._process_target_partition(vts, cp)

                # TODO: Check for missing dependencies.  See ScalaCompile for an example.
                # Will require figuring out what the actual deps of a class file are.

                vts.update()
                if self.get_artifact_cache() and self.context.options.write_to_artifact_cache:
                    self._write_to_artifact_cache(vts, sources_by_target)

            # Provide the target->class and source->class mappings to downstream tasks if needed.
            if self.context.products.isrequired('classes'):
                if os.path.exists(self._depfile):
                    sources_by_target = self._compute_sources_by_target(java_targets)
                    deps = Dependencies(self._classes_dir)
                    deps.load(self._depfile)
                    self._add_all_products_to_genmap(sources_by_target, deps.classes_by_source)

            # Produce a monolithic apt processor service info file for further compilation rounds
            # and the unit test classpath.
            all_processors = set()
            for target in java_targets:
                if is_apt(target) and target.processors:
                    all_processors.update(target.processors)
            processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
            if os.path.exists(processor_info_file):
                # Keep processors recorded by earlier rounds alongside this run's.
                with safe_open(processor_info_file, 'r') as f:
                    for processor in f:
                        all_processors.add(processor.strip())
            self.write_processor_info(processor_info_file, all_processors)
def _add_all_products_to_genmap(self, sources_by_target, classes_by_source):
    """Register generated classes and apt service info files with the 'classes' product map.

    sources_by_target: target -> iterable of absolute source paths.
    classes_by_source: source path -> list of generated class files.
    """
    genmap = self.context.products.get('classes')
    for target, target_sources in sources_by_target.items():
        # Map generated classes to the owning targets and sources.
        for src in target_sources:
            produced = classes_by_source.get(src, [])
            rel_src = os.path.relpath(src, target.target_base)
            genmap.add(rel_src, self._classes_dir, produced)
            genmap.add(target, self._classes_dir, produced)

        # TODO(John Sirois): Map target.resources in the same way
        # 'Map' (rewrite) annotation processor service info files to the owning targets.
        if is_apt(target) and target.processors:
            info_dir = os.path.join(self._resources_dir, Target.maybe_readable_identify([target]))
            info_file = os.path.join(info_dir, _PROCESSOR_INFO_FILE)
            self.write_processor_info(info_file, target.processors)
            genmap.add(target, info_dir, [_PROCESSOR_INFO_FILE])
def execute(self, targets):
    """Compile invalid java target partitions; post-process already-valid ones.

    Publishes class products and per-target apt processor info files unless
    running dry.
    """
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.invalidated(java_targets, invalidate_dependants=True,
                                  partition_size_hint=self._partition_size_hint) as invalidation_check:
                for vt in invalidation_check.all_vts:
                    if vt.valid:
                        # Don't compile, just post-process.
                        self.post_process(vt)
                for vt in invalidation_check.invalid_vts_partitioned:
                    # Compile, using partitions for efficiency.
                    self.execute_single_compilation(vt, cp)
                    if not self.dry_run:
                        vt.update()

        if not self.dry_run:
            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                for target, classes_by_source in self._deps.findclasses(java_targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # 'Map' (rewrite) annotation processor service info files to the owning targets.
                for target in java_targets:
                    if is_apt(target) and target.processors:
                        basedir = os.path.join(self._resources_dir, target.id)
                        processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                        self.write_processor_info(processor_info_file, target.processors)
                        genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def execute(self, targets):
    """Compile invalid java target partitions and merge their depfiles.

    Class products and apt processor info files are published unless running dry.
    """
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.invalidated(java_targets, invalidate_dependents=True,
                                  partition_size_hint=self._partition_size_hint) as invalidation_check:
                for vt in invalidation_check.invalid_vts_partitioned:
                    # Compile, using partitions for efficiency.
                    self.execute_single_compilation(vt, cp)
                    if not self.dry_run:
                        vt.update()

                for vt in invalidation_check.all_vts:
                    depfile = self.create_depfile_path(vt.targets)
                    if not self.dry_run and os.path.exists(depfile):
                        # Read in the deps created either just now or by a previous run on these targets.
                        deps = Dependencies(self._classes_dir)
                        deps.load(depfile)
                        self._deps.merge(deps)

        if not self.dry_run:
            if self.context.products.isrequired('classes'):
                genmap = self.context.products.get('classes')

                # Map generated classes to the owning targets and sources.
                for target, classes_by_source in self._deps.findclasses(java_targets).items():
                    for source, classes in classes_by_source.items():
                        genmap.add(source, self._classes_dir, classes)
                        genmap.add(target, self._classes_dir, classes)

                # TODO(John Sirois): Map target.resources in the same way
                # 'Map' (rewrite) annotation processor service info files to the owning targets.
                for target in java_targets:
                    if is_apt(target) and target.processors:
                        basedir = os.path.join(self._resources_dir, target.id)
                        processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                        self.write_processor_info(processor_info_file, target.processors)
                        genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def execute(self, targets):
    """Compile java targets via the versioned-targets invalidation API.

    In flattened mode only the combined invalid targets are compiled; otherwise
    every versioned target is passed through so per-target deps stay accurate.
    """
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.invalidated(java_targets, invalidate_dependants=True) as invalidated:
                if self._flatten:
                    # The deps go to a single well-known file, so we need only pass in the invalid targets here.
                    self.execute_single_compilation(invalidated.combined_invalid_versioned_targets(), cp)
                else:
                    # We must pass all targets,even valid ones, to execute_single_compilation(), so it can
                    # track the per-target deps correctly.
                    for vt in invalidated.all_versioned_targets():
                        self.execute_single_compilation(vt, cp)
                        invalidated.update_versioned_target(vt)

        if self.context.products.isrequired('classes'):
            genmap = self.context.products.get('classes')

            # Map generated classes to the owning targets and sources.
            for target, classes_by_source in self._deps.findclasses(java_targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, self._classes_dir, classes)
                    genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # 'Map' (rewrite) annotation processor service info files to the owning targets.
            for target in java_targets:
                if is_apt(target) and target.processors:
                    basedir = os.path.join(self._resources_dir, target.id)
                    processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                    self.write_processor_info(processor_info_file, target.processors)
                    genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def execute(self, targets):
    """Compile java targets via the versioned-targets invalidation API.

    Flattened mode compiles the combined invalid targets in one shot; otherwise
    each versioned target is compiled individually to keep per-target deps exact.
    """
    java_targets = filter(JavaCompile._has_java_sources, targets)
    if java_targets:
        safe_mkdir(self._classes_dir)
        safe_mkdir(self._depfile_dir)
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.invalidated(java_targets, invalidate_dependants=True) as invalidated:
                if self._flatten:
                    # The deps go to a single well-known file, so we need only pass in the invalid targets here.
                    self.execute_single_compilation(invalidated.combined_invalid_versioned_targets(), cp)
                else:
                    # We must pass all targets,even valid ones, to execute_single_compilation(), so it can
                    # track the per-target deps correctly.
                    for vt in invalidated.all_versioned_targets():
                        self.execute_single_compilation(vt, cp)
                        invalidated.update_versioned_target(vt)

        if self.context.products.isrequired('classes'):
            genmap = self.context.products.get('classes')

            # Map generated classes to the owning targets and sources.
            for target, classes_by_source in self._deps.findclasses(java_targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, self._classes_dir, classes)
                    genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # 'Map' (rewrite) annotation processor service info files to the owning targets.
            for target in java_targets:
                if is_apt(target) and target.processors:
                    basedir = os.path.join(self._resources_dir, target.id)
                    processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                    self.write_processor_info(processor_info_file, target.processors)
                    genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def sift_targets(target, add_deps=False):
    """Recursively partition the dependency graph for IDE classpath purposes.

    Targets needed pre-compiled on the IDE classpath (codegen, apt, or anything
    beneath such a target when add_deps is set) are attached to the current head
    of ``codegen_graph``; other targets contribute their external jar deps to
    ``jar_deps``.  Mutates the closed-over ``visited``, ``codegen_graph`` and
    ``jar_deps`` collections, and strips internal_dependencies as it descends.
    """
    if target not in visited:
        visited.add(target)
        is_needed_on_ide_classpath = add_deps or target.is_codegen or is_apt(target)
        if is_needed_on_ide_classpath:
            codegen_graph[0].internal_dependencies.add(target)
        else:
            for jar_dependency in target.jar_dependencies:
                if jar_dependency.rev:
                    if is_transitive(target):
                        jar_deps.add(jar_dependency)
                    else:
                        # Copy so the shared jar_dependency object isn't mutated.
                        jar_deps.add(copy(jar_dependency).intransitive())

        if is_needed_on_ide_classpath:
            # Push this target as the graph head while visiting its subtree.
            codegen_graph.appendleft(target)

        # Iterate over a snapshot since we discard entries while recursing.
        for internal_target in list(target.internal_dependencies):
            target.internal_dependencies.discard(internal_target)
            sift_targets(internal_target, is_needed_on_ide_classpath)

        if is_needed_on_ide_classpath:
            codegen_graph.popleft()
def test_apt(t):
    """Collect t into the closed-over apt_targets list when it is an apt target."""
    if not is_apt(t):
        return
    apt_targets.append(t)
def is_java(target):
    """True for targets producing java classes: apt targets, java libraries and java tests."""
    if is_apt(target):
        return True
    return isinstance(target, (JavaLibrary, JavaTests))
def _is_java(target):
    """True for targets producing java classes: apt targets, java libraries and java tests."""
    return is_apt(target) or isinstance(target, (JavaLibrary, JavaTests))
def execute(self, targets):
    """Compile java targets via jmake, recursing per-target when not flattened.

    Maintains the monolithic apt processor service info file and publishes class
    products read from the jmake dependencies file when required.
    """
    if not self._flatten and len(targets) > 1:
        # Unflattened: recurse one target at a time in reverse topological order
        # so dependencies are compiled before their dependents.
        topologically_sorted_targets = filter(JavaCompile._is_java,
                                              reversed(InternalTarget.sort_targets(targets)))
        for target in topologically_sorted_targets:
            self.execute([target])
        return

    self.context.log.info('Compiling targets %s' % str(targets))

    java_targets = filter(JavaCompile._is_java, targets)
    if java_targets:
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, self._resources_dir))
                cp.insert(0, (conf, self._classes_dir))

            with self.changed(java_targets, invalidate_dependants=True) as changed:
                sources_by_target, processors, fingerprint = self.calculate_sources(changed)
                if sources_by_target:
                    # Union all source sets across the changed targets.
                    sources = reduce(lambda all, sources: all.union(sources),
                                     sources_by_target.values())
                    if not sources:
                        self.context.log.warn(
                            'Skipping java compile for targets with no sources:\n %s'
                            % '\n '.join(str(t) for t in sources_by_target.keys()))
                    else:
                        classpath = [jar for conf, jar in cp if conf in self._confs]
                        result = self.compile(classpath, sources, fingerprint)
                        if result != 0:
                            # Translate jmake's exit code into a friendly error if known.
                            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN,
                                                                                     result)
                            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

                    if processors:
                        # Produce a monolithic apt processor service info file for further compilation rounds
                        # and the unit test classpath.
                        processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
                        if os.path.exists(processor_info_file):
                            with safe_open(processor_info_file, 'r') as f:
                                for processor in f:
                                    processors.add(processor.strip())
                        self.write_processor_info(processor_info_file, processors)

        if self.context.products.isrequired('classes'):
            genmap = self.context.products.get('classes')

            # Map generated classes to the owning targets and sources.
            dependencies = Dependencies(self._classes_dir, self._dependencies_file)
            for target, classes_by_source in dependencies.findclasses(targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, self._classes_dir, classes)
                    genmap.add(target, self._classes_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # 'Map' (rewrite) annotation processor service info files to the owning targets.
            for target in targets:
                if is_apt(target) and target.processors:
                    basedir = os.path.join(self._resources_dir, target.id)
                    processor_info_file = os.path.join(basedir, _PROCESSOR_INFO_FILE)
                    self.write_processor_info(processor_info_file, target.processors)
                    genmap.add(target, basedir, [_PROCESSOR_INFO_FILE])
def is_cp(target):
    """True when target's classes belong on the pre-built classpath."""
    needs_precompile = target.is_codegen or is_apt(target)
    return (needs_precompile
            or (skip_java and is_java(target))
            or (skip_scala and is_scala(target)))
def _has_java_sources(target):
    """True when target carries java sources: apt targets, java libraries and java tests."""
    return is_apt(target) or isinstance(target, (JavaLibrary, JavaTests))
def _has_java_sources(target):
    """True when target carries java sources: apt targets, java libraries and java tests."""
    if is_apt(target):
        return True
    return isinstance(target, (JavaLibrary, JavaTests))