def run(self, lock):
    """Validate targets, configure logging/timing, then attempt the requested phases.

    Returns the result of Phase.attempt (or Phase.execute of the 'goals' help
    phase when unknown goals were requested).
    """
    # Fail fast if any requested target participates in a dependency cycle.
    with self.check_errors("Target contains a dependency cycle") as error:
        for target in self.targets:
            try:
                InternalTarget.check_cycles(target)
            except InternalTarget.CycleException as e:
                # NOTE(review): the exception detail 'e' is discarded; only the
                # offending target id is reported.
                error(target.id)

    timer = None
    if self.options.time:
        # Minimal timer facade: Phase.attempt apparently expects objects with
        # now() and log(message) — TODO confirm against Phase.attempt.
        class Timer(object):
            def now(self):
                return time.time()

            def log(self, message):
                print(message)

        timer = Timer()

    logger = None
    if self.options.log or self.options.log_level:
        # Imported lazily so logging setup cost is only paid when requested.
        from twitter.common.log import init
        from twitter.common.log.options import LogOptions
        LogOptions.set_stderr_log_level((self.options.log_level or 'info').upper())
        logdir = self.options.logdir or self.config.get('goals', 'logdir', default=None)
        if logdir:
            safe_mkdir(logdir)
            LogOptions.set_log_dir(logdir)
            init('goals')
        else:
            init()
        logger = log

    if self.options.recursive_directory:
        log.warn('--all-recursive is deprecated, use a target spec with the form [dir]:: instead')
        for dir in self.options.recursive_directory:
            self.add_target_recursive(dir)

    if self.options.target_directory:
        log.warn('--all is deprecated, use a target spec with the form [dir]: instead')
        for dir in self.options.target_directory:
            self.add_target_directory(dir)

    context = Context(self.config, self.options, self.targets, lock=lock, log=logger)

    unknown = []
    for phase in self.phases:
        if not phase.goals():
            unknown.append(phase)

    if unknown:
        # Unknown goals: print them, then run the 'goals' phase as help output.
        print('Unknown goal(s): %s' % ' '.join(phase.name for phase in unknown))
        print('')
        return Phase.execute(context, 'goals')

    if logger:
        logger.debug('Operating on targets: %s', self.targets)

    return Phase.attempt(context, self.phases, timer=timer)
def testSort(self):
    """sort_targets yields most-dependent first, regardless of input order."""
    a = MockTarget('a', [])
    b = MockTarget('b', [a])
    c = MockTarget('c', [b])
    d = MockTarget('d', [c, a])
    e = MockTarget('e', [d])

    # The dependency chain e -> d -> c -> b -> a has exactly one valid order.
    expected = [e, d, c, b, a]
    # assertEquals is a deprecated alias; assertEqual is the canonical spelling.
    self.assertEqual(InternalTarget.sort_targets([a, b, c, d, e]), expected)
    self.assertEqual(InternalTarget.sort_targets([b, d, a, e, c]), expected)
    self.assertEqual(InternalTarget.sort_targets([e, d, c, b, a]), expected)
def __init__(self, name, dependencies=None, num_sources=0, exclusives=None):
    """Build a mock target inside a throwaway parse context so no BUILD file is needed."""
    with ParseContext.temp():
        InternalTarget.__init__(self, name, dependencies, exclusives=exclusives)
        TargetWithSources.__init__(self, name, exclusives=exclusives)
    self.num_sources = num_sources
    # Record each declared exclusive as a singleton value-set keyed by name.
    self.declared_exclusives = defaultdict(set)
    if exclusives is not None:
        for key, value in exclusives.items():
            self.declared_exclusives[key].add(value)
    self.exclusives = None
def run(self, lock):
    """Run the requested phases after cycle-checking, logging setup and option expansion.

    When --time is set, timings are collected via self.timer and a report is
    printed after the phases complete.
    """
    # Fail fast if any requested target participates in a dependency cycle;
    # the cycle check itself is timed under "parse:check_cycles".
    with self.check_errors("Target contains a dependency cycle") as error:
        with self.timer.timing("parse:check_cycles"):
            for target in self.targets:
                try:
                    InternalTarget.check_cycles(target)
                except InternalTarget.CycleException as e:
                    # NOTE(review): 'e' is unused; only the target id is reported.
                    error(target.id)

    logger = None
    if self.options.log or self.options.log_level:
        # Lazy imports: only pay for logging setup when it was requested.
        from twitter.common.log import init
        from twitter.common.log.options import LogOptions
        LogOptions.set_stderr_log_level((self.options.log_level or "info").upper())
        logdir = self.options.logdir or self.config.get("goals", "logdir", default=None)
        if logdir:
            safe_mkdir(logdir)
            LogOptions.set_log_dir(logdir)
            init("goals")
        else:
            init()
        logger = log

    if self.options.recursive_directory:
        log.warn("--all-recursive is deprecated, use a target spec with the form [dir]:: instead")
        for dir in self.options.recursive_directory:
            self.add_target_recursive(dir)

    if self.options.target_directory:
        log.warn("--all is deprecated, use a target spec with the form [dir]: instead")
        for dir in self.options.target_directory:
            self.add_target_directory(dir)

    context = Context(self.config, self.options, self.targets, lock=lock, log=logger)

    unknown = []
    for phase in self.phases:
        if not phase.goals():
            unknown.append(phase)

    if unknown:
        # Unknown goals requested: report them and fall back to the help phase.
        print("Unknown goal(s): %s" % " ".join(phase.name for phase in unknown))
        print("")
        return Phase.execute(context, "goals")

    if logger:
        logger.debug("Operating on targets: %s", self.targets)

    ret = Phase.attempt(context, self.phases, timer=self.timer if self.options.time else None)
    if self.options.time:
        print("Timing report")
        print("=============")
        self.timer.print_timings()
    return ret
def run(self, lock):
    """Cycle-check targets, set up logging, expand deprecated dir options, run phases.

    With --time, Phase.attempt receives self.timer and a timing report is
    printed once the phases finish.
    """
    with self.check_errors("Target contains a dependency cycle") as error:
        # Time the cycle check so it shows up in the --time report.
        with self.timer.timing('parse:check_cycles'):
            for target in self.targets:
                try:
                    InternalTarget.check_cycles(target)
                except InternalTarget.CycleException as e:
                    # Only the offending target's id is recorded, not the
                    # exception detail.
                    error(target.id)

    logger = None
    if self.options.log or self.options.log_level:
        # Deferred imports keep startup cheap when logging is off.
        from twitter.common.log import init
        from twitter.common.log.options import LogOptions
        LogOptions.set_stderr_log_level((self.options.log_level or 'info').upper())
        logdir = self.options.logdir or self.config.get('goals', 'logdir', default=None)
        if logdir:
            safe_mkdir(logdir)
            LogOptions.set_log_dir(logdir)
            init('goals')
        else:
            init()
        logger = log

    if self.options.recursive_directory:
        log.warn('--all-recursive is deprecated, use a target spec with the form [dir]:: instead')
        for dir in self.options.recursive_directory:
            self.add_target_recursive(dir)

    if self.options.target_directory:
        log.warn('--all is deprecated, use a target spec with the form [dir]: instead')
        for dir in self.options.target_directory:
            self.add_target_directory(dir)

    context = Context(self.config, self.options, self.targets, lock=lock, log=logger)

    unknown = []
    for phase in self.phases:
        if not phase.goals():
            unknown.append(phase)

    if unknown:
        # Report unknown goals, then show the goal listing via the help phase.
        print('Unknown goal(s): %s' % ' '.join(phase.name for phase in unknown))
        print('')
        return Phase.execute(context, 'goals')

    if logger:
        logger.debug('Operating on targets: %s', self.targets)

    ret = Phase.attempt(context, self.phases, timer=self.timer if self.options.time else None)
    if self.options.time:
        print('Timing report')
        print('=============')
        self.timer.print_timings()
    return ret
def test_detect_cycle_direct(self):
    """A self-dependency must make sort_targets raise CycleException."""
    node = MockTarget('a')
    # Sanity check: a lone node sorts fine before the self-edge exists.
    InternalTarget.sort_targets([node])
    node.update_dependencies([node])
    try:
        InternalTarget.sort_targets([node])
    except InternalTarget.CycleException:
        return  # cycle detected, as required
    self.fail("Expected a cycle to be detected")
def testDetectCycleDirect(self):
    """check_cycles must reject a target that depends on itself."""
    node = MockTarget('a')
    # A single node with no edges is acyclic.
    InternalTarget.check_cycles(node)
    node.internal_dependencies = [node]
    try:
        InternalTarget.check_cycles(node)
    except InternalTarget.CycleException:
        return  # expected failure mode
    self.fail("Expected a cycle to be detected")
def testDetectCycleDirect(self):
    """sort_targets rejects a target whose internal deps include itself."""
    node = MockTarget('a')
    InternalTarget.sort_targets([node])  # acyclic so far
    node.internal_dependencies = [node]
    try:
        InternalTarget.sort_targets([node])
    except InternalTarget.CycleException:
        return  # the self-cycle was caught
    self.fail("Expected a cycle to be detected")
def testDetectCycleDirect(self):
    """check_cycles flags a direct self-cycle."""
    node = MockTarget("a")
    # No cycles yet: a bare node passes the check.
    InternalTarget.check_cycles(node)
    node.internal_dependencies = [node]
    try:
        InternalTarget.check_cycles(node)
    except InternalTarget.CycleException:
        return  # expected
    self.fail("Expected a cycle to be detected")
def test_validation(self):
    # Exercises InternalTarget constructor validation inside a temp parse context.
    with ParseContext.temp('InternalTargetTest/test_validation'):
        # A string name with no dependencies is accepted.
        InternalTarget(name="valid", dependencies=None)
        # A non-string name must raise.
        self.assertRaises(TargetDefinitionException, InternalTarget, name=1, dependencies=None)
        # A Target instance is an acceptable dependency.
        InternalTarget(name="valid2", dependencies=Target(name='mybird'))
        # A non-Target dependency must raise.
        self.assertRaises(TargetDefinitionException, InternalTarget, name='valid3', dependencies=1)
def test_detect_cycle_indirect(self):
    """A cycle introduced through an indirect dependency is detected."""
    c = MockTarget('c')
    b = MockTarget('b', [c])
    a = MockTarget('a', [c, b])
    # Acyclic so far.
    InternalTarget.sort_targets([a])
    # Close the loop: c -> a -> ... -> c.
    c.internal_dependencies = [a]
    try:
        InternalTarget.sort_targets([a])
    except InternalTarget.CycleException:
        return  # expected
    self.fail("Expected a cycle to be detected")
def testDetectIndirect(self):
    """sort_targets detects a cycle closed through an indirect dependency."""
    c = MockTarget('c')
    b = MockTarget('b', c)
    a = MockTarget('a', c, b)
    InternalTarget.sort_targets([a])  # acyclic so far
    c.internal_dependencies = [a]  # close the loop a -> ... -> c -> a
    try:
        InternalTarget.sort_targets([a])
    except InternalTarget.CycleException:
        return  # expected
    self.fail("Expected a cycle to be detected")
def testDetectIndirect(self):
    """check_cycles detects a cycle reachable only indirectly."""
    c = MockTarget("c")
    b = MockTarget("b", c)
    a = MockTarget("a", c, b)
    # No cycles before the back-edge exists.
    InternalTarget.check_cycles(a)
    c.internal_dependencies = [a]
    try:
        InternalTarget.check_cycles(a)
    except InternalTarget.CycleException:
        return  # the indirect cycle was caught
    self.fail("Expected a cycle to be detected")
def testDetectIndirect(self):
    """An indirect back-edge must trip check_cycles."""
    c = MockTarget('c')
    b = MockTarget('b', c)
    a = MockTarget('a', c, b)
    InternalTarget.check_cycles(a)  # still a DAG at this point
    # Introduce the back-edge c -> a, creating a cycle through b.
    c.internal_dependencies = [a]
    try:
        InternalTarget.check_cycles(a)
    except InternalTarget.CycleException:
        return  # expected
    self.fail("Expected a cycle to be detected")
def create_chunks(context, goals):
    """Partition the context's targets into ordered chunks, one goal 'flavor' per chunk."""

    def discriminator(target):
        # A target's flavor is the index of the first goal whose group predicate
        # matches it, or 'other' when no goal claims it.
        for i, goal in enumerate(goals):
            if goal.group.predicate(target):
                return i
        return 'other'

    # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
    # specific, and all we care is that the Targets have dependencies defined
    coalesced = InternalTarget.coalesce_targets(context.targets(is_internal), discriminator)
    # Reverse so iteration runs from least- to most-dependent — TODO confirm
    # against coalesce_targets' ordering contract.
    coalesced = list(reversed(coalesced))

    def not_internal(target):
        return not is_internal(target)

    # All non-internal targets go into a single leading chunk, if any exist.
    rest = OrderedSet(context.targets(not_internal))
    chunks = [rest] if rest else []

    flavor = None
    chunk_start = 0
    for i, target in enumerate(coalesced):
        target_flavor = discriminator(target)
        # Start a new chunk each time the flavor changes.
        if target_flavor != flavor and i > chunk_start:
            chunks.append(OrderedSet(coalesced[chunk_start:i]))
            chunk_start = i
        flavor = target_flavor
    if chunk_start < len(coalesced):
        # Flush the trailing chunk.
        chunks.append(OrderedSet(coalesced[chunk_start:]))

    context.log.debug('::: created chunks(%d)' % len(chunks))
    for i, chunk in enumerate(chunks):
        context.log.debug(' chunk(%d):\n\t%s' % (i, '\n\t'.join(sorted(map(str, chunk)))))

    return chunks
def exported_targets(self):
    """Return the exportable targets, including synthetic code-gen twins of the roots,
    ordered least- to most-dependent."""
    candidates = set()
    if self.transitive:
        candidates.update(self.context.targets())
    else:
        candidates.update(self.context.target_roots)

    def get_synthetic(lang, target):
        # Yields the synthetic targets a code gen task registered for 'target'
        # under the given language product.
        mappings = self.context.products.get(lang).get(target)
        if mappings:
            for generated in mappings.itervalues():
                for synthetic in generated:
                    yield synthetic

    # Handle the case where a code gen target is in the listed roots and the thus the publishable
    # target is a synthetic twin generated by a code gen task upstream.
    for candidate in self.context.target_roots:
        candidates.update(get_synthetic('java', candidate))
        candidates.update(get_synthetic('scala', candidate))

    def exportable(tgt):
        return tgt in candidates and tgt.is_exported

    # sort_targets is most-dependent-first; reversing yields dependency order.
    return OrderedSet(
        filter(
            exportable,
            reversed(
                InternalTarget.sort_targets(filter(exportable, candidates)))))
def _create_chunks(context, goals):
    """Build the ordered list of target chunks, grouped first by declared exclusives
    and then by goal 'flavor' within each exclusives-compatible group."""

    def discriminator(target):
        # Flavor = index of the first goal whose group predicate matches, else 'other'.
        for i, goal in enumerate(goals):
            if goal.group.predicate(target):
                return i
        return 'other'

    # First, divide the set of all targets to be built into compatible chunks, based
    # on their declared exclusives. Then, for each chunk of compatible exclusives, do
    # further subchunking. At the end, we'll have a list of chunks to be built,
    # which will go through the chunks of each exclusives-compatible group separately.

    # TODO(markcc); chunks with incompatible exclusives require separate ivy resolves.
    # Either interleave the ivy task in this group so that it runs once for each batch of
    # chunks with compatible exclusives, or make the compilation tasks do their own ivy resolves
    # for each batch of targets they're asked to compile.

    exclusives = Group._get_exclusives_product(context)
    sorted_excl_group_keys = exclusives.get_ordered_group_keys()

    all_chunks = []

    for excl_group_key in sorted_excl_group_keys:
        # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
        # specific, and all we care is that the Targets have dependencies defined
        chunk_targets = exclusives.get_targets_for_group_key(excl_group_key)
        # need to extract the targets for this chunk that are internal.
        ## TODO(markcc): right here, we're using "context.targets", which doesn't respect any of the
        ## exclusives rubbish going on around here.
        #coalesced = InternalTarget.coalesce_targets(context.targets(is_internal), discriminator)
        coalesced = InternalTarget.coalesce_targets(filter(is_internal, chunk_targets), discriminator)
        coalesced = list(reversed(coalesced))

        def not_internal(target):
            return not is_internal(target)

        # got targets that aren't internal.
        #rest = OrderedSet(context.targets(not_internal))
        rest = OrderedSet(filter(not_internal, chunk_targets))

        # Non-internal targets form one leading chunk for this exclusives group.
        chunks = [rest] if rest else []
        flavor = None
        chunk_start = 0
        for i, target in enumerate(coalesced):
            target_flavor = discriminator(target)
            # Start a new chunk whenever the flavor changes.
            if target_flavor != flavor and i > chunk_start:
                chunks.append(OrderedSet(coalesced[chunk_start:i]))
                chunk_start = i
            flavor = target_flavor
        if chunk_start < len(coalesced):
            chunks.append(OrderedSet(coalesced[chunk_start:]))
        all_chunks += chunks

    context.log.debug('::: created chunks(%d)' % len(all_chunks))
    for i, chunk in enumerate(all_chunks):
        context.log.debug(' chunk(%d):\n\t%s' % (i, '\n\t'.join(sorted(map(str, chunk)))))

    return all_chunks
def exported_targets(self):
    """Exportable targets (plus synthetic code-gen twins of the roots) in dependency order."""
    candidates = set(self.context.targets() if self.transitive else self.context.target_roots)

    def synthetics_for(lang, tgt):
        # Synthetic targets a code gen task registered for tgt under this language.
        produced = self.context.products.get(lang).get(tgt)
        if produced:
            for batch in produced.itervalues():
                for synthetic in batch:
                    yield synthetic

    # A code gen target listed in the roots may actually be published via the
    # synthetic twin an upstream code gen task created for it.
    for root in self.context.target_roots:
        candidates.update(synthetics_for('java', root))
        candidates.update(synthetics_for('scala', root))

    def exportable(tgt):
        return tgt in candidates and tgt.is_exported

    ordered = InternalTarget.sort_targets(filter(exportable, candidates))
    return OrderedSet(filter(exportable, reversed(ordered)))
def __init__(self, root_dir, parser, argv):
    """Parse command-line specs into resolved, cycle-checked targets.

    Arguments after a '--' separator become self.build_args; arguments before
    it are parsed as target specs.
    """
    Command.__init__(self, root_dir, parser, argv)

    if not self.args:
        self.error("A spec argument is required")

    try:
        specs_end = self.args.index('--')
        if len(self.args) > specs_end:
            # Plain slice syntax replaces the deprecated __getslice__ call
            # (which was removed in Python 3).
            self.build_args = self.args[specs_end + 1:]
        else:
            self.build_args = []
    except ValueError:
        # No '--' separator: the first arg is the spec, the rest are build args.
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt escape.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        try:
            InternalTarget.check_cycles(target)
        except CycleException as e:
            self.error("Target contains an internal dependency cycle: %s" % e)

        if not target:
            self.error("Target %s does not exist" % address)
        if not target.address.is_meta:
            target.address.is_meta = self.options.is_meta or address.is_meta
        self.targets.add(target)

    self.is_ide = self.options.is_ide
    self.ide_transitivity = self.options.ide_transitivity
def __init__(self, root_dir, parser, argv):
    """Resolve the command line into targets, splitting build args at '--'.

    Specs before the '--' separator are parsed, resolved and cycle-checked;
    anything after it is stored in self.build_args.
    """
    Command.__init__(self, root_dir, parser, argv)

    if not self.args:
        self.error("A spec argument is required")

    try:
        specs_end = self.args.index('--')
        if len(self.args) > specs_end:
            # Idiomatic slicing instead of the deprecated __getslice__ protocol
            # (removed in Python 3).
            self.build_args = self.args[specs_end + 1:]
        else:
            self.build_args = []
    except ValueError:
        # No '--' present: one spec, remaining args are build args.
        specs_end = 1
        self.build_args = self.args[1:] if len(self.args) > 1 else []

    self.targets = OrderedSet()
    for spec in self.args[:specs_end]:
        try:
            address = Address.parse(root_dir, spec)
        except Exception:
            # except Exception (not bare except) keeps interpreter-exit
            # exceptions propagating.
            self.error("Problem parsing spec %s: %s" % (spec, traceback.format_exc()))

        try:
            target = Target.get(address)
        except Exception:
            self.error("Problem parsing BUILD target %s: %s" % (address, traceback.format_exc()))

        try:
            InternalTarget.check_cycles(target)
        except CycleException as e:
            self.error("Target contains an internal dependency cycle: %s" % e)

        if not target:
            self.error("Target %s does not exist" % address)
        if not target.address.is_meta:
            target.address.is_meta = self.options.is_meta or address.is_meta
        self.targets.add(target)

    self.is_ide = self.options.is_ide
    self.ide_transitivity = self.options.ide_transitivity
def _compute_transitive_deps_by_target(self):
    """Map each target to the set of all targets it depends on, transitively."""
    # Walk from least- to most-dependent so every dep's closure exists first.
    ordering = reversed(InternalTarget.sort_targets(self._context.targets()))
    closure_by_target = defaultdict(set)
    for tgt in ordering:
        closure = set()
        for dep in getattr(tgt, 'dependencies', ()):
            # The dep's own closure is already complete thanks to the ordering.
            closure.update(closure_by_target.get(dep, []))
            closure.add(dep)
        closure_by_target[tgt] = closure
    return closure_by_target
def exported_targets(self):
    """Exportable internal targets, ordered least- to most-dependent."""
    pool = set(self.context.targets() if self.transitive else self.context.target_roots)

    def exportable(tgt):
        return tgt in pool and is_exported(tgt) and is_internal(tgt)

    # sort_targets is most-dependent-first; reverse for dependency order.
    ordered = InternalTarget.sort_targets(filter(exportable, pool))
    return OrderedSet(filter(exportable, reversed(ordered)))
def _compute_transitive_deps_by_target(self): """Map from target to all the targets it depends on, transitively.""" # Sort from least to most dependent. sorted_targets = reversed(InternalTarget.sort_targets(self._context.targets())) transitive_deps_by_target = defaultdict(set) # Iterate in dep order, to accumulate the transitive deps for each target. for target in sorted_targets: transitive_deps = set() if hasattr(target, 'dependencies'): for dep in target.dependencies: transitive_deps.update(transitive_deps_by_target.get(dep, [])) transitive_deps.add(dep) transitive_deps_by_target[target] = transitive_deps return transitive_deps_by_target
def __init__(self, name, dependencies=None, num_sources=0):
    """Construct a mock target inside a throwaway parse context (no BUILD file needed)."""
    with ParseContext.temp():
        InternalTarget.__init__(self, name, dependencies)
        TargetWithSources.__init__(self, name)
    self.num_sources = num_sources
def exported_targets(self):
    """Return the exportable internal targets in reverse topological order."""
    if self.transitive:
        candidates = set(self.context.targets())
    else:
        candidates = set(self.context.target_roots)

    def exportable(tgt):
        return tgt in candidates and is_exported(tgt) and is_internal(tgt)

    return OrderedSet(
        tgt for tgt in reversed(InternalTarget.sort_targets(filter(exportable, candidates)))
        if exportable(tgt))
def _create_chunks(context, goals):
    """Build the ordered chunk list: exclusives-compatible groups first, goal flavors within."""

    def discriminator(target):
        # Flavor = index of the first goal whose group predicate matches, else 'other'.
        for i, goal in enumerate(goals):
            if goal.group.predicate(target):
                return i
        return 'other'

    # First, divide the set of all targets to be built into compatible chunks, based
    # on their declared exclusives. Then, for each chunk of compatible exclusives, do
    # further subchunking. At the end, we'll have a list of chunks to be built,
    # which will go through the chunks of each exclusives-compatible group separately.

    # TODO(markcc); chunks with incompatible exclusives require separate ivy resolves.
    # Either interleave the ivy task in this group so that it runs once for each batch of
    # chunks with compatible exclusives, or make the compilation tasks do their own ivy resolves
    # for each batch of targets they're asked to compile.

    exclusives = Group._get_exclusives_product(context)
    sorted_excl_group_keys = exclusives.get_ordered_group_keys()

    all_chunks = []

    for excl_group_key in sorted_excl_group_keys:
        # TODO(John Sirois): coalescing should be made available in another spot, InternalTarget is jvm
        # specific, and all we care is that the Targets have dependencies defined
        chunk_targets = exclusives.get_targets_for_group_key(excl_group_key)
        # need to extract the targets for this chunk that are internal.
        ## TODO(markcc): right here, we're using "context.targets", which doesn't respect any of the
        ## exclusives rubbish going on around here.
        #coalesced = InternalTarget.coalesce_targets(context.targets(is_internal), discriminator)
        coalesced = InternalTarget.coalesce_targets(filter(is_internal, chunk_targets), discriminator)
        coalesced = list(reversed(coalesced))

        def not_internal(target):
            return not is_internal(target)

        # got targets that aren't internal.
        #rest = OrderedSet(context.targets(not_internal))
        rest = OrderedSet(filter(not_internal, chunk_targets))

        # Non-internal targets form one leading chunk for this exclusives group.
        chunks = [rest] if rest else []
        flavor = None
        chunk_start = 0
        for i, target in enumerate(coalesced):
            target_flavor = discriminator(target)
            # Start a new chunk whenever the flavor changes.
            if target_flavor != flavor and i > chunk_start:
                chunks.append(OrderedSet(coalesced[chunk_start:i]))
                chunk_start = i
            flavor = target_flavor
        if chunk_start < len(coalesced):
            chunks.append(OrderedSet(coalesced[chunk_start:]))
        all_chunks += chunks

    context.log.debug('::: created chunks(%d)' % len(all_chunks))
    for i, chunk in enumerate(all_chunks):
        # Python 2 iterator protocol (.next()); reports each chunk's flavor.
        flavor = discriminator(iter(chunk).next())
        context.log.debug(' chunk(%d) [flavor=%s]:\n\t%s' % (i, flavor, '\n\t'.join(sorted(map(str, chunk)))))

    return all_chunks
def extract_target(java_targets, name=None):
    """Extracts a minimal set of linked targets from the given target's internal transitive
    dependency set. The root target in the extracted target set is returned. The algorithm does a
    topological sort of the internal targets and then tries to coalesce targets of a given type.
    Any target with a custom ant build xml will be excluded from the coalescing."""

    # TODO(John Sirois): this is broken - representative_target is not necessarily representative
    representative_target = list(java_targets)[0]

    meta_target_base_name = "fast-%s" % (name if name else representative_target.name)
    provides = None
    deployjar = hasattr(representative_target, 'deployjar') and representative_target.deployjar
    buildflags = representative_target.buildflags

    def discriminator(tgt):
        # Chunk up our targets by (type, src base) - the javac task in the ant build relies upon a
        # single srcdir that points to the root of a package tree to ensure differential compilation
        # works.
        return type(tgt), tgt.target_base

    def create_target(category, target_name, target_index, targets):
        # NOTE: this inner 'name' shadows extract_target's 'name' parameter,
        # which has already been folded into meta_target_base_name above.
        def name(name):
            return "%s-%s-%d" % (target_name, name, target_index)

        # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
        # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
        # so that tests can house test IDL in tests/
        target_type, base = category
        if target_type == JavaProtobufLibrary:
            return JavaProtobufLibrary._aggregate(name('protobuf'), provides, buildflags, targets)
        elif target_type == JavaThriftLibrary:
            return JavaThriftLibrary._aggregate(name('thrift'), provides, buildflags, targets)
        elif target_type == AnnotationProcessor:
            return AnnotationProcessor._aggregate(name('apt'), provides, targets)
        elif target_type == JavaLibrary:
            return JavaLibrary._aggregate(name('java'), provides, deployjar, buildflags, targets, base)
        elif target_type == ScalaLibrary:
            return ScalaLibrary._aggregate(name('scala'), provides, deployjar, buildflags, targets, base)
        elif target_type == JavaTests:
            return JavaTests._aggregate(name('java-tests'), buildflags, targets)
        elif target_type == ScalaTests:
            return ScalaTests._aggregate(name('scala-tests'), buildflags, targets)
        else:
            raise Exception("Cannot aggregate targets of type: %s" % target_type)

    # TODO(John Sirois): support a flag that selects conflict resolution policy - this currently
    # happens to mirror the ivy policy we use
    def resolve_conflicts(target):
        # Keep only the highest rev of each (org, name) jar dependency.
        dependencies = {}
        for dependency in target.resolved_dependencies:
            for jar in dependency._as_jar_dependencies():
                key = jar.org, jar.name
                previous = dependencies.get(key, jar)
                if jar.rev >= previous.rev:
                    if jar != previous:
                        print "WARNING: replacing %s with %s for %s" % (previous, jar, target._id)
                        target.resolved_dependencies.remove(previous)
                        target.jar_dependencies.remove(previous)
                    dependencies[key] = jar
        return target

    # chunk up our targets by type & custom build xml
    coalesced = InternalTarget.coalesce_targets(java_targets, discriminator)
    coalesced = list(reversed(coalesced))

    start_type = discriminator(coalesced[0])
    start = 0
    descriptors = []

    for current in range(0, len(coalesced)):
        current_target = coalesced[current]
        current_type = discriminator(current_target)

        if current_target.custom_antxml_path:
            if start < current:
                # if we have a type chunk to our left, record it
                descriptors.append((start_type, coalesced[start:current]))
            # record a chunk containing just the target that has the custom build xml to be conservative
            descriptors.append((current_type, [current_target]))
            start = current + 1
            if current < (len(coalesced) - 1):
                start_type = discriminator(coalesced[start])
        elif start_type != current_type:
            # record the type chunk we just left
            descriptors.append((start_type, coalesced[start:current]))
            start = current
            start_type = current_type

    if start < len(coalesced):
        # record the tail chunk
        descriptors.append((start_type, coalesced[start:]))

    # build meta targets aggregated from the chunks and keep track of which targets end up in which
    # meta targets
    meta_targets_by_target_id = dict()
    targets_by_meta_target = []
    for (ttype, targets), index in zip(descriptors, reversed(range(0, len(descriptors)))):
        meta_target = resolve_conflicts(create_target(ttype, meta_target_base_name, index, targets))
        targets_by_meta_target.append((meta_target, targets))
        for target in targets:
            meta_targets_by_target_id[target._id] = meta_target

    # calculate the other meta-targets (if any) each meta-target depends on
    extra_targets_by_meta_target = []
    for meta_target, targets in targets_by_meta_target:
        meta_deps = set()
        custom_antxml_path = None
        for target in targets:
            if target.custom_antxml_path:
                custom_antxml_path = target.custom_antxml_path
            for dep in target.resolved_dependencies:
                if is_jvm(dep):
                    meta = meta_targets_by_target_id[dep._id]
                    if meta != meta_target:
                        meta_deps.add(meta)
        extra_targets_by_meta_target.append((meta_target, meta_deps, custom_antxml_path))

    def lift_excludes(meta_target):
        # Collect excludes from the whole internal dependency closure.
        excludes = set()

        def lift(target):
            if target.excludes:
                excludes.update(target.excludes)
            for jar_dep in target.jar_dependencies:
                excludes.update(jar_dep.excludes)
            for internal_dep in target.internal_dependencies:
                lift(internal_dep)

        lift(meta_target)
        return excludes

    # link in the extra inter-meta deps
    meta_targets = []
    for meta_target, extra_deps, custom_antxml_path in extra_targets_by_meta_target:
        meta_targets.append(meta_target)
        meta_target.update_dependencies(extra_deps)
        meta_target.excludes = lift_excludes(meta_target)
        meta_target.custom_antxml_path = custom_antxml_path

    sorted_meta_targets = InternalTarget.sort_targets(meta_targets)

    def prune_metas(target):
        if sorted_meta_targets:
            try:
                sorted_meta_targets.remove(target)
            except ValueError:
                # we've already removed target in the current walk
                pass

    # link any disconnected meta_target graphs so we can return 1 root target
    root = None
    while sorted_meta_targets:
        new_root = sorted_meta_targets[0]
        new_root.walk(prune_metas, is_jvm)
        if root:
            new_root.update_dependencies([root])
        root = new_root

    return root
def extract_target(java_targets, name=None):
    """Extracts a minimal set of linked targets from the given target's internal transitive
    dependency set. The root target in the extracted target set is returned. The algorithm does a
    topological sort of the internal targets and then tries to coalesce targets of a given type.
    Any target with a custom ant build xml will be excluded from the coalescing."""

    # TODO(John Sirois): this is broken - representative_target is not necessarily representative
    representative_target = list(java_targets)[0]

    meta_target_base_name = "fast-%s" % (name if name else representative_target.name)
    provides = None
    deployjar = hasattr(representative_target, 'deployjar') and representative_target.deployjar
    buildflags = representative_target.buildflags

    def discriminator(tgt):
        # Chunk up our targets by (type, src base) - the javac task in the ant build relies upon a
        # single srcdir that points to the root of a package tree to ensure differential compilation
        # works.
        return type(tgt), tgt.target_base

    def create_target(category, target_name, target_index, targets):
        # NOTE: this inner 'name' deliberately shadows the outer parameter,
        # which is already baked into meta_target_base_name.
        def name(name):
            return "%s-%s-%d" % (target_name, name, target_index)

        # TODO(John Sirois): JavaLibrary and ScalaLibrary can float here between src/ and tests/ - add
        # ant build support to allow the same treatment for JavaThriftLibrary and JavaProtobufLibrary
        # so that tests can house test IDL in tests/
        target_type, base = category
        if target_type == JavaProtobufLibrary:
            return JavaProtobufLibrary._aggregate(name('protobuf'), provides, buildflags, targets)
        elif target_type == JavaThriftLibrary:
            return JavaThriftLibrary._aggregate(name('thrift'), provides, buildflags, targets)
        elif target_type == AnnotationProcessor:
            return AnnotationProcessor._aggregate(name('apt'), provides, targets)
        elif target_type == JavaLibrary:
            return JavaLibrary._aggregate(name('java'), provides, deployjar, buildflags, targets, base)
        elif target_type == ScalaLibrary:
            return ScalaLibrary._aggregate(name('scala'), provides, deployjar, buildflags, targets, base)
        elif target_type == JavaTests:
            return JavaTests._aggregate(name('java-tests'), buildflags, targets)
        elif target_type == ScalaTests:
            return ScalaTests._aggregate(name('scala-tests'), buildflags, targets)
        else:
            raise Exception("Cannot aggregate targets of type: %s" % target_type)

    # TODO(John Sirois): support a flag that selects conflict resolution policy - this currently
    # happens to mirror the ivy policy we use
    def resolve_conflicts(target):
        # Retain only the highest rev of each (org, name) jar dependency.
        dependencies = {}
        for dependency in target.resolved_dependencies:
            for jar in dependency._as_jar_dependencies():
                key = jar.org, jar.name
                previous = dependencies.get(key, jar)
                if jar.rev >= previous.rev:
                    if jar != previous:
                        print "WARNING: replacing %s with %s for %s" % (previous, jar, target._id)
                        target.resolved_dependencies.remove(previous)
                        target.jar_dependencies.remove(previous)
                    dependencies[key] = jar
        return target

    # chunk up our targets by type & custom build xml
    coalesced = InternalTarget.coalesce_targets(java_targets, discriminator)
    coalesced = list(reversed(coalesced))

    start_type = discriminator(coalesced[0])
    start = 0
    descriptors = []

    for current in range(0, len(coalesced)):
        current_target = coalesced[current]
        current_type = discriminator(current_target)

        if current_target.custom_antxml_path:
            if start < current:
                # if we have a type chunk to our left, record it
                descriptors.append((start_type, coalesced[start:current]))
            # record a chunk containing just the target that has the custom build xml to be conservative
            descriptors.append((current_type, [current_target]))
            start = current + 1
            if current < (len(coalesced) - 1):
                start_type = discriminator(coalesced[start])
        elif start_type != current_type:
            # record the type chunk we just left
            descriptors.append((start_type, coalesced[start:current]))
            start = current
            start_type = current_type

    if start < len(coalesced):
        # record the tail chunk
        descriptors.append((start_type, coalesced[start:]))

    # build meta targets aggregated from the chunks and keep track of which targets end up in which
    # meta targets
    meta_targets_by_target_id = dict()
    targets_by_meta_target = []
    for (ttype, targets), index in zip(descriptors, reversed(range(0, len(descriptors)))):
        meta_target = resolve_conflicts(create_target(ttype, meta_target_base_name, index, targets))
        targets_by_meta_target.append((meta_target, targets))
        for target in targets:
            meta_targets_by_target_id[target._id] = meta_target

    # calculate the other meta-targets (if any) each meta-target depends on
    extra_targets_by_meta_target = []
    for meta_target, targets in targets_by_meta_target:
        meta_deps = set()
        custom_antxml_path = None
        for target in targets:
            if target.custom_antxml_path:
                custom_antxml_path = target.custom_antxml_path
            for dep in target.resolved_dependencies:
                if is_jvm(dep):
                    meta = meta_targets_by_target_id[dep._id]
                    if meta != meta_target:
                        meta_deps.add(meta)
        extra_targets_by_meta_target.append((meta_target, meta_deps, custom_antxml_path))

    def lift_excludes(meta_target):
        # Gather excludes across the full internal dependency closure.
        excludes = set()

        def lift(target):
            if target.excludes:
                excludes.update(target.excludes)
            for jar_dep in target.jar_dependencies:
                excludes.update(jar_dep.excludes)
            for internal_dep in target.internal_dependencies:
                lift(internal_dep)

        lift(meta_target)
        return excludes

    # link in the extra inter-meta deps
    meta_targets = []
    for meta_target, extra_deps, custom_antxml_path in extra_targets_by_meta_target:
        meta_targets.append(meta_target)
        meta_target.update_dependencies(extra_deps)
        meta_target.excludes = lift_excludes(meta_target)
        meta_target.custom_antxml_path = custom_antxml_path

    sorted_meta_targets = InternalTarget.sort_targets(meta_targets)

    def prune_metas(target):
        if sorted_meta_targets:
            try:
                sorted_meta_targets.remove(target)
            except ValueError:
                # we've already removed target in the current walk
                pass

    # link any disconnected meta_target graphs so we can return 1 root target
    root = None
    while sorted_meta_targets:
        new_root = sorted_meta_targets[0]
        new_root.walk(prune_metas, is_jvm)
        if root:
            new_root.update_dependencies([root])
        root = new_root

    return root