def __handle_flag_value(manager, value, ps):
    assert isinstance(value, basestring)
    assert isinstance(ps, property_set.PropertySet)
    result = []

    if get_grist(value):
        f = feature.get(value)
        values = ps.get(f)

        for value in values:
            if f.dependency:
                # the value of a dependency feature is a target
                # and must be actualized
                result.append(value.actualize())
            elif f.path or f.free:
                # Treat features with && in the value specially -- each
                # &&-separated element is considered a separate value. This
                # is needed to handle searched libraries, which must be in a
                # specific order.
                if not __re_two_ampersands.search(value):
                    result.append(value)
                else:
                    result.extend(value.split('&&'))
            else:
                result.append(value)
    else:
        result.append(value)

    return sequence.unique(result, stable=True)
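# Illustrative, self-contained sketch of the '&&'-splitting rule above. The
# helper name is ours, and we assume __re_two_ampersands is re.compile('&&');
# this is not part of the build system itself.
import re

_two_ampersands = re.compile('&&')

def _split_searched_value(value):
    # A value without '&&' stays whole; otherwise each '&&'-separated element
    # becomes a separate value, preserving the order searched libraries need.
    if not _two_ampersands.search(value):
        return [value]
    return value.split('&&')

assert _split_searched_value('m&&pthread') == ['m', 'pthread']
assert _split_searched_value('boost_system') == ['boost_system']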
def refine(properties, requirements):
    """ Refines 'properties' by overriding any non-free properties
        for which a different value is specified in 'requirements'.
        Conditional requirements are just added without modification.
        Returns the resulting list of properties.
    """
    # The result has no duplicates, so we store it in a set
    result = set()

    # Records all requirements.
    required = {}

    # All the elements of requirements should be present in the result.
    # Record them so that we can handle 'properties'.
    for r in requirements:
        # Don't consider conditional requirements.
        if not r.condition():
            required[r.feature()] = r

    for p in properties:
        # Skip conditional properties
        if p.condition():
            result.add(p)
        # No processing for free properties
        elif p.feature().free():
            result.add(p)
        else:
            if p.feature() in required:
                result.add(required[p.feature()])
            else:
                result.add(p)

    return sequence.unique(list(result) + requirements)
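# Minimal stand-alone model of the 'refine' semantics above, using
# (feature, value, is_free) tuples as hypothetical stand-ins for Property
# objects (conditional properties omitted for brevity):
def _refine_model(properties, requirements):
    required = dict((f, (f, v, free)) for f, v, free in requirements if not free)
    seen, out = set(), []
    for f, v, free in properties + requirements:
        p = (f, v, free) if free else required.get(f, (f, v, free))
        if p not in seen:
            seen.add(p)
            out.append(p)
    return out

# A non-free feature is overridden; a free one is kept as-is.
assert _refine_model([('optimization', 'off', False), ('define', 'FOO', True)],
                     [('optimization', 'speed', False)]) == \
    [('optimization', 'speed', False), ('define', 'FOO', True)]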
def viable_source_types_for_generator_real(generator):
    """ Returns the list of source types which, when passed to the 'run'
        method of 'generator', have some chance of being eventually used
        (probably after conversion by other generators).
    """
    source_types = generator.source_types()

    if not source_types:
        # If the generator does not specify any source types, it might be a
        # special generator like builtin.lib-generator, which just relays to
        # other generators. Return '*' to indicate that any source type is
        # possibly OK, since we don't know for sure.
        return ['*']
    else:
        result = []
        for s in source_types:
            viable_sources = viable_source_types(s)
            if viable_sources == "*":
                result = ["*"]
                break
            else:
                result.extend(type.all_derived(s) + viable_sources)
        return unique(result)
def all_referenced_targets(self, result):
    """Returns all targets referenced by this subvariant, either directly or
    indirectly, and either as sources, or as dependency properties. Targets
    referred to via a dependency property are returned as properties, not
    targets."""

    # Find directly referenced targets.
    deps = self.build_properties().dependency()
    all_targets = self.sources_ + deps

    # Find other subvariants.
    r = []
    for e in all_targets:
        if not e in result:
            result.add(e)
            if isinstance(e, property.Property):
                t = e.value()
            else:
                t = e

            # FIXME: how can this be?
            cs = t.creating_subvariant()
            if cs:
                r.append(cs)
    r = unique(r)
    for s in r:
        if s != self:
            s.all_referenced_targets(result)
def register(g):
    """ Registers new generator instance 'g'. """
    id = g.id()
    __generators[id] = g

    # A generator can produce several targets of the same type. We want a
    # unique occurrence of that generator in .generators.$(t) in that case,
    # otherwise it will be tried twice and we'll get a false ambiguity.
    for t in sequence.unique(g.target_types()):
        __type_to_generators.setdefault(t, []).append(g)

    # Update the set of generators for toolset

    # TODO: should we check that a generator with this id is not already
    # registered? For example, the fop.jam module intentionally declares two
    # generators with the same id, so such a check would break it.

    # Some generators have multiple periods in their name, so the normal
    # $(id:S=) won't generate the right toolset name. E.g. if
    # id = gcc.compile.c++, then .generators-for-toolset.$(id:S=) will append
    # to .generators-for-toolset.gcc.compile, which is a separate value from
    # .generators-for-toolset.gcc. Correcting this makes generator
    # inheritance work properly. See also inherit-generators in module
    # toolset.
    base = id.split('.', 100)[0]
    __generators_for_toolset.setdefault(base, []).append(g)
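# The base-name extraction above, in isolation: a generator id may contain
# several periods, and only the leading component names the toolset.
assert 'gcc.compile.c++'.split('.', 100)[0] == 'gcc'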
def __init__(self, main_target, prop_set, sources, build_properties,
             sources_usage_requirements, created_targets):
    """
    main_target:                 The instance of the MainTarget class.
    prop_set:                    Properties requested for this target.
    sources:                     Source targets.
    build_properties:            Actually used properties.
    sources_usage_requirements:  Properties propagated from sources.
    created_targets:             Top-level created targets.
    """
    self.main_target_ = main_target
    self.properties_ = prop_set
    self.sources_ = sources
    self.build_properties_ = build_properties
    self.sources_usage_requirements_ = sources_usage_requirements
    self.created_targets_ = created_targets

    self.usage_requirements_ = None

    # Pre-compose the list of other dependency graphs on which this one
    # depends.
    deps = build_properties.get('<implicit-dependency>')

    self.other_dg_ = []
    for d in deps:
        self.other_dg_.append(d.creating_subvariant())
    self.other_dg_ = unique(self.other_dg_)

    self.implicit_includes_cache_ = {}
    self.target_directories_ = None
def convert_multiple_sources_to_consumable_types(self, project, prop_set, sources):
    """ Converts several files to consumable types. """
    if __debug__:
        from .targets import ProjectTarget

        assert isinstance(project, ProjectTarget)
        assert isinstance(prop_set, property_set.PropertySet)
        assert is_iterable_typed(sources, virtual_target.VirtualTarget)
    if not self.source_types_:
        return list(sources)

    acceptable_types = set()
    for t in self.source_types_:
        acceptable_types.update(type.all_derived(t))

    result = []
    for source in sources:
        if source.type() not in acceptable_types:
            transformed = construct_types(
                project, None, self.source_types_, prop_set, [source])
            # construct_types returns [prop_set, [targets]]
            for t in transformed[1]:
                if t.type() in self.source_types_:
                    result.append(t)
            if not transformed:
                project.manager().logger().log(__name__, " failed to convert ", source)
        else:
            result.append(source)

    result = sequence.unique(result, stable=True)
    return result
def convert_to_consumable_types(self, project, name, prop_set, sources,
                                only_one=False):
    """ Attempts to convert 'sources' to the types that this generator can
        handle. The intention is to produce the set of targets that should be
        used when the generator is run.

        only_one: convert 'sources' to only one of the source types; if there
                  is more than one possibility, report an error.

        Returns:
            consumed: all targets that can be consumed.
    """
    if __debug__:
        from .targets import ProjectTarget

        assert isinstance(name, basestring) or name is None
        assert isinstance(project, ProjectTarget)
        assert isinstance(prop_set, property_set.PropertySet)
        assert is_iterable_typed(sources, virtual_target.VirtualTarget)
        assert isinstance(only_one, bool)
    consumed = []
    missing_types = []

    if len(sources) > 1:
        # Don't know how to handle several sources yet. Just try
        # to pass the request to other generators.
        missing_types = self.source_types_
    else:
        (c, m) = self.consume_directly(sources[0])
        consumed += c
        missing_types += m

    # No need to search for a transformation if some source type has
    # consumed the source and no more source types are needed.
    if only_one and consumed:
        missing_types = []

    # TODO: we should check that only one source type is created if
    # 'only_one' is true.
    # TODO: consider if the consumed/bypassed separation should be done by
    # 'construct_types'.
    if missing_types:
        transformed = construct_types(project, name, missing_types,
                                      prop_set, sources)

        # Add targets of the right type to 'consumed'. Add others to
        # 'bypassed'. The 'generators.construct' rule has done its best to
        # convert everything to the required type. There's no need to rerun
        # it on targets of different types.

        # NOTE: ignoring usage requirements.
        for t in transformed[1]:
            if t.type() in missing_types:
                consumed.append(t)

    consumed = unique(consumed)
    return consumed
def collect_targets(self, targets):
    s = [t.creating_subvariant() for t in targets]
    s = unique(s)

    result = set(targets)
    for i in s:
        i.all_referenced_targets(result)

    result2 = []
    for r in result:
        if isinstance(r, property.Property):
            if r.feature().name() != "use":
                result2.append(r.value())
        else:
            result2.append(r)
    result2 = unique(result2)
    return result2
def collect_targets(self, targets):
    s = [t.creating_subvariant() for t in targets]
    s = unique(filter(lambda l: l is not None, s))

    result = set(targets)
    for i in s:
        i.all_referenced_targets(result)

    result2 = []
    for r in result:
        if isinstance(r, property.Property):
            if r.feature().name() != 'use':
                result2.append(r.value())
        else:
            result2.append(r)
    result2 = unique(result2)
    return result2
def collect_targets(self, targets):
    s = [t.creating_subvariant() for t in targets]
    s = unique(filter(lambda l: l is not None, s))

    result = set(targets)
    for i in s:
        i.all_referenced_targets(result)

    result2 = []
    for r in result:
        if isinstance(r, property.Property):
            if r.feature.name != 'use':
                result2.append(r.value)
        else:
            result2.append(r)
    result2 = unique(result2)
    return result2
def compute_target_directories(self, target_type=None):
    result = []
    for t in self.created_targets():
        if not target_type or b2.build.type.is_derived(t.type(), target_type):
            result.append(t.path())

    for d in self.other_dg_:
        result.extend(d.all_target_directories(target_type))

    result = unique(result)
    return result
def build_multiple(self, property_sets):
    usage_requirements = property_set.empty()
    result = []
    for p in property_sets:
        r = AliasTarget.generate(self, p)
        if r:
            usage_requirements = usage_requirements.add(r.usage_requirements())
            result.extend(r.targets())
    return targets.GenerateResult(usage_requirements, unique(result))
def create(raw_properties=[]):
    """ Creates a new 'PropertySet' instance for the given raw properties,
        or returns an already existing one.
    """
    # Sort a copy so that neither the caller's list nor the shared default
    # argument is mutated.
    raw_properties = unique(sorted(raw_properties))

    key = '-'.join(raw_properties)

    if key not in __cache:
        __cache[key] = PropertySet(raw_properties)

    return __cache[key]
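# Stand-alone sketch of the caching pattern used by 'create' above
# (hypothetical names; the real code keys the cache on the joined raw
# properties and stores PropertySet instances):
_ps_cache = {}

def _intern_props(raw):
    key = '-'.join(sorted(set(raw)))
    if key not in _ps_cache:
        _ps_cache[key] = tuple(sorted(set(raw)))
    return _ps_cache[key]

# Equivalent inputs, in any order, share one cached instance.
assert _intern_props(['<toolset>gcc', '<variant>debug']) is \
       _intern_props(['<variant>debug', '<toolset>gcc'])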
def run_path_setup(target, sources, ps):
    # For testing, we need to make sure that all dynamic libraries needed by
    # the test are found. So, we collect all paths from dependency libraries
    # (via the xdll-path property) and add whatever explicit dll-paths the
    # user has specified. The resulting paths are added to the environment on
    # each test invocation.
    dll_paths = ps.get('dll-path')
    dll_paths.extend(ps.get('xdll-path'))
    dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH"))
    dll_paths = unique(dll_paths)
    if dll_paths:
        bjam.call("set-target-variable", target, "PATH_SETUP",
                  common.prepend_path_variable_command(
                      common.shared_library_path_variable(), dll_paths))
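# A rough sketch of what the generated PATH_SETUP command amounts to on a
# POSIX shell. The helper below is ours and its exact output format is an
# assumption; the real text comes from common.prepend_path_variable_command
# and the platform's shared-library path variable.
def _prepend_path_command(variable, paths):
    return 'export %s="%s:$%s"' % (variable, ':'.join(paths), variable)

assert _prepend_path_command('LD_LIBRARY_PATH', ['/a', '/b']) == \
       'export LD_LIBRARY_PATH="/a:/b:$LD_LIBRARY_PATH"'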
def register(g):
    """ Registers new generator instance 'g'. """
    assert isinstance(g, Generator)
    id = g.id()
    __generators[id] = g

    # A generator can produce several targets of the same type. We want a
    # unique occurrence of that generator in .generators.$(t) in that case,
    # otherwise it will be tried twice and we'll get a false ambiguity.
    for t in sequence.unique(g.target_types()):
        __type_to_generators.setdefault(t, []).append(g)

    # Update the set of generators for toolset

    # TODO: should we check that a generator with this id is not already
    # registered? For example, the fop.jam module intentionally declares two
    # generators with the same id, so such a check would break it.

    # Some generators have multiple periods in their name, so the normal
    # $(id:S=) won't generate the right toolset name. E.g. if
    # id = gcc.compile.c++, then .generators-for-toolset.$(id:S=) will append
    # to .generators-for-toolset.gcc.compile, which is a separate value from
    # .generators-for-toolset.gcc. Correcting this makes generator
    # inheritance work properly. See also inherit-generators in module
    # toolset.
    base = id.split('.', 100)[0]
    __generators_for_toolset.setdefault(base, []).append(g)

    # After adding a new generator that can construct new target types, we
    # need to clear the related cached viable source target type information
    # for constructing a specific target type or using a specific generator.
    # Cached viable source target type lists affected by this are those
    # containing any of the target types constructed by the new generator or
    # any of their base target types.
    #
    # A more advanced alternative to clearing that cached viable source
    # target type information would be to expand it with additional source
    # types, or even better -- mark it as needing to be expanded on next use.
    #
    # For now we just clear all the cached viable source target type
    # information that does not simply state 'all types' and may implement a
    # more detailed algorithm later on if it becomes needed.
    invalidate_extendable_viable_source_target_type_cache()
def set_library_order(manager, sources, prop_set, result):
    used_libraries = []
    deps = prop_set.dependency()

    sources.extend(d.value() for d in deps)
    sources = sequence.unique(sources)

    for l in sources:
        if l.type() and type.is_derived(l.type(), 'LIB'):
            used_libraries.append(l)

    created_libraries = []
    for l in result:
        if l.type() and type.is_derived(l.type(), 'LIB'):
            created_libraries.append(l)

    created_libraries = set.difference(created_libraries, used_libraries)
    set_library_order_aux(created_libraries, used_libraries)
def create(raw_properties=[]):
    """ Creates a new 'PropertySet' instance for the given raw properties,
        or returns an already existing one.
    """
    # FIXME: propagate to callers.
    if raw_properties and isinstance(raw_properties[0], property.Property):
        x = raw_properties
    else:
        x = [property.create_from_string(ps) for ps in raw_properties]

    # Sort a copy, so that the caller's list is not mutated.
    x = unique(sorted(x), stable=True)

    # FIXME: can we do better, e.g. by directly computing
    # the hash value of the list?
    key = tuple(x)

    if key not in __cache:
        __cache[key] = PropertySet(x)

    return __cache[key]
def __viable_source_types_real(target_type):
    """ Returns a list of source types which can possibly be converted to
        'target_type' by some chain of generator invocations.

        More formally, takes all generators for 'target_type' and returns the
        union of source types for those generators and the result of calling
        itself recursively on those source types.
    """
    generators = []

    t = type.all_bases(target_type)

    result = []
    # 't' is the list of types which are not yet processed
    while t:
        # Find all generators for the current type. Unlike
        # 'find_viable_generators', we don't care about prop_set.
        generators = __type_to_generators.get(t[0], [])
        t = t[1:]

        for g in generators:
            if not g.source_types():
                # Empty source types -- everything can be accepted.
                result = "*"
                # This will terminate the outer loop.
                t = None
                break

            for source_type in g.source_types():
                if not source_type in result:
                    # If the generator accepts 'source_type', it will
                    # happily accept any type derived from it.
                    all = type.all_derived(source_type)
                    for n in all:
                        if not n in result:
                            t.append(n)
                            result.append(n)

    result = unique(result)

    return result
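# Self-contained sketch of the same reachability computation over a toy type
# graph (hypothetical data and helper; the real code walks registered
# generators and type.all_derived):
def _viable_sources(target, sources_for, derived):
    # sources_for: type -> source types some generator accepts for it
    # derived: type -> all types derived from it (including itself)
    todo, seen = [target], []
    while todo:
        t, todo = todo[0], todo[1:]
        for s in sources_for.get(t, []):
            for n in derived.get(s, [s]):
                if n not in seen:
                    seen.append(n)
                    todo.append(n)
    return seen

# If OBJ is built from CPP, and CPP from YACC, both are viable sources of OBJ.
assert _viable_sources('OBJ', {'OBJ': ['CPP'], 'CPP': ['YACC']},
                       {'CPP': ['CPP'], 'YACC': ['YACC']}) == ['CPP', 'YACC']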
def all_referenced_targets(self):
    """Returns all targets referenced by this subvariant, either directly or
    indirectly, and either as sources, or as dependency properties. Targets
    referred to via a dependency property are returned as properties, not
    targets."""

    # Find directly referenced targets.
    deps = self.build_properties().dependency()
    all_targets = self.sources_ + deps

    # Find other subvariants.
    r = []
    for t in all_targets:
        r.append(t.creating_subvariant())
    r = unique(r)

    for s in r:
        if s != self:
            all_targets.extend(s.all_referenced_targets())

    return all_targets
def implicit_includes(self, feature, target_type):
    """ Returns the properties which specify implicit include paths to
        generated headers. This traverses all targets in this subvariant and
        the subvariants referred to via <implicit-dependency> properties.
        For all targets which are of type 'target_type' (or for all targets,
        if 'target_type' is not specified), the result will contain
        <$(feature)>path-to-that-target.
    """
    if not target_type:
        key = feature
    else:
        key = feature + "-" + target_type

    result = self.implicit_includes_cache_.get(key)
    if not result:
        target_paths = self.all_target_directories(target_type)
        target_paths = unique(target_paths)
        result = ["<%s>%s" % (feature, p) for p in target_paths]
        self.implicit_includes_cache_[key] = result

    return result
def __init__(self, main_target, prop_set, sources, build_properties,
             sources_usage_requirements, created_targets):
    """
    main_target:                 The instance of the MainTarget class.
    prop_set:                    Properties requested for this target.
    sources:                     Source targets.
    build_properties:            Actually used properties.
    sources_usage_requirements:  Properties propagated from sources.
    created_targets:             Top-level created targets.
    """
    if __debug__:
        from .targets import AbstractTarget

        assert isinstance(main_target, AbstractTarget)
        assert isinstance(prop_set, property_set.PropertySet)
        assert is_iterable_typed(sources, VirtualTarget)
        assert isinstance(build_properties, property_set.PropertySet)
        assert isinstance(sources_usage_requirements, property_set.PropertySet)
        assert is_iterable_typed(created_targets, VirtualTarget)
    self.main_target_ = main_target
    self.properties_ = prop_set
    self.sources_ = sources
    self.build_properties_ = build_properties
    self.sources_usage_requirements_ = sources_usage_requirements
    self.created_targets_ = created_targets

    self.usage_requirements_ = None

    # Pre-compose the list of other dependency graphs on which this one
    # depends.
    deps = build_properties.get("<implicit-dependency>")

    self.other_dg_ = []
    for d in deps:
        self.other_dg_.append(d.creating_subvariant())
    self.other_dg_ = unique(self.other_dg_)

    self.implicit_includes_cache_ = {}
    self.target_directories_ = None
def convert_to_consumable_types(self, project, name, prop_set, sources,
                                only_one=False):
    """ Attempts to convert 'sources' to the types that this generator can
        handle. The intention is to produce the set of targets that should be
        used when the generator is run.

        only_one: convert 'sources' to only one of the source types; if there
                  is more than one possibility, report an error.

        Returns a pair:
            consumed: all targets that can be consumed.
            bypassed: all targets that cannot be consumed.
    """
    consumed = []
    bypassed = []
    missing_types = []

    if len(sources) > 1:
        # Don't know how to handle several sources yet. Just try
        # to pass the request to other generators.
        missing_types = self.source_types_
    else:
        (c, m) = self.consume_directly(sources[0])
        consumed += c
        missing_types += m

    # No need to search for a transformation if some source type has
    # consumed the source and no more source types are needed.
    if only_one and consumed:
        missing_types = []

    # TODO: we should check that only one source type is created if
    # 'only_one' is true.
    # TODO: consider if the consumed/bypassed separation should be done by
    # 'construct_types'.
    if missing_types:
        transformed = construct_types(project, name, missing_types,
                                      prop_set, sources)

        # Add targets of the right type to 'consumed'. Add others to
        # 'bypassed'. The 'generators.construct' rule has done its best to
        # convert everything to the required type. There's no need to rerun
        # it on targets of different types.

        # NOTE: ignoring usage requirements.
        for t in transformed[1]:
            if t.type() in missing_types:
                consumed.append(t)
            else:
                bypassed.append(t)

    consumed = unique(consumed)
    bypassed = unique(bypassed)

    # Remove elements of 'bypassed' that are in 'consumed'.

    # Suppose the target type of the current generator, X, is produced from
    # X_1 and X_2, which are produced from Y by one generator. When creating
    # X_1 from Y, X_2 will be added to 'bypassed'. Likewise, when creating
    # X_2 from Y, X_1 will be added to 'bypassed'. But they are also in
    # 'consumed'. We have to remove them from 'bypassed', so that generators
    # up the call stack don't try to convert them.

    # In this particular case, the X_1 instance in 'consumed' and the X_1
    # instance in 'bypassed' will be the same: because they have the same
    # source and action name, 'virtual-target.register' won't allow two
    # different instances. Therefore, it's OK to use 'set.difference'.
    bypassed = set.difference(bypassed, consumed)

    return (consumed, bypassed)
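# Toy illustration of the final 'bypassed' subtraction above: a by-product
# that some other invocation already consumed must not be reconverted up the
# call stack (hypothetical target names, plain strings instead of virtual
# targets):
_consumed = ['X_1', 'X_2']        # each produced as the wanted type once
_bypassed = ['X_2', 'Z']          # X_2 also appeared as a by-product
_bypassed = [t for t in _bypassed if t not in _consumed]
assert _bypassed == ['Z']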
# which are in 'foo' or in any children Jamfiles, but not in any
# unrelated Jamfiles. So, we collect the list of projects under which
# cleaning is allowed.
#
projects_to_clean = []
targets_to_clean = []
if clean or clean_all:
    for t in targets:
        if isinstance(t, ProjectTarget):
            projects_to_clean.append(t.project_module())

    for t in results_of_main_targets:
        # Don't include roots or sources.
        targets_to_clean += b2.build.virtual_target.traverse(t)

targets_to_clean = unique(targets_to_clean)

is_child_cache_ = {}

# Returns 'true' if 'project' is a child of 'current-project',
# possibly indirect, or is equal to 'project'.
# Returns 'false' otherwise.
def is_child(project):
    r = is_child_cache_.get(project, None)
    if not r:
        if project in projects_to_clean:
            r = 1
        else:
            parent = manager.projects().attribute(project, "parent-module")
            if parent and parent != "user-config":
def depends(self, d):
    """ Adds additional instances of 'VirtualTarget' that this
        one depends on.
    """
    self.dependencies_ = sorted(unique(self.dependencies_ + d))