def set_file_constraints(tasks):
	ins = Utils.defaultdict(set)
	outs = Utils.defaultdict(set)
	for x in tasks:
		for a in getattr(x, "inputs", []) + getattr(x, "dep_nodes", []):
			ins[id(a)].add(x)
		for a in getattr(x, "outputs", []):
			outs[id(a)].add(x)
	links = set(ins.keys()).intersection(outs.keys())
	for k in links:
		for a in ins[k]:
			a.run_after.update(outs[k])
def set_file_constraints(tasks):
	ins = Utils.defaultdict(set)
	outs = Utils.defaultdict(set)
	for x in tasks:
		for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []):
			ins[id(a)].add(x)
		for a in getattr(x, 'outputs', []):
			outs[id(a)].add(x)
	links = set(ins.keys()).intersection(outs.keys())
	for k in links:
		for a in ins[k]:
			a.run_after.update(outs[k])
def runnable_status(self):
	if getattr(self, 'mod_fortran_done', None):
		return super(fc, self).runnable_status()
	bld = self.generator.bld
	lst = get_fortran_tasks(self)
	for tsk in lst:
		tsk.mod_fortran_done = True
	for tsk in lst:
		ret = tsk.runnable_status()
		if ret == Task.ASK_LATER:
			for x in lst:
				x.mod_fortran_done = None
			return Task.ASK_LATER
	ins = Utils.defaultdict(set)
	outs = Utils.defaultdict(set)
	for tsk in lst:
		key = tsk.uid()
		for x in bld.raw_deps[key]:
			if x.startswith('MOD@'):
				name = bld.modfile(x.replace('MOD@', ''))
				node = bld.srcnode.find_or_declare(name)
				tsk.set_outputs(node)
				outs[id(node)].add(tsk)
	for tsk in lst:
		key = tsk.uid()
		for x in bld.raw_deps[key]:
			if x.startswith('USE@'):
				name = bld.modfile(x.replace('USE@', ''))
				node = bld.srcnode.find_resource(name)
				if node and node not in tsk.outputs:
					if not node in bld.node_deps[key]:
						bld.node_deps[key].append(node)
					ins[id(node)].add(tsk)
	for k in ins.keys():
		for a in ins[k]:
			a.run_after.update(outs[k])
			tmp = []
			for t in outs[k]:
				tmp.extend(t.outputs)
			a.dep_nodes.extend(tmp)
			try:
				a.dep_nodes.sort(key=lambda x: x.abspath())
			except TypeError:
				# Python < 2.4 fallback: list.sort took a cmp function, not a key
				a.dep_nodes.sort(lambda x, y: cmp(x.abspath(), y.abspath()))
	for tsk in lst:
		try:
			delattr(tsk, 'cache_sig')
		except AttributeError:
			pass
	return super(fc, self).runnable_status()
def set_file_constraints(tasks): "adds tasks to the task 'run_after' attribute based on the task inputs and outputs" ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) for x in tasks: for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []): ins[id(a)].add(x) for a in getattr(x, 'outputs', []): outs[id(a)].add(x) links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k])
def set_file_constraints(tasks):
	ins = Utils.defaultdict(set)
	outs = Utils.defaultdict(set)
	for x in tasks:
		for a in x.inputs:
			ins[a].add(x)
		for a in x.dep_nodes:
			ins[a].add(x)
		for a in x.outputs:
			outs[a].add(x)
	links = set(ins.keys()).intersection(outs.keys())
	for k in links:
		for a in ins[k]:
			a.run_after.update(outs[k])
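# A minimal, self-contained sketch of the file-constraint idea above, using
# plain objects instead of waf tasks (the _Tsk class is an assumption standing
# in for waflib.Task.Task): a task that reads a node must run after any task
# that produces that node.
from collections import defaultdict

class _Tsk(object):
	# stand-in with only the attributes the algorithm touches
	def __init__(self, inputs=(), outputs=()):
		self.inputs = list(inputs)
		self.outputs = list(outputs)
		self.dep_nodes = []
		self.run_after = set()

def _set_file_constraints(tasks):
	ins, outs = defaultdict(set), defaultdict(set)
	for x in tasks:
		for a in x.inputs + x.dep_nodes:
			ins[id(a)].add(x)
		for a in x.outputs:
			outs[id(a)].add(x)
	# nodes that are both produced and consumed impose an ordering
	for k in set(ins) & set(outs):
		for a in ins[k]:
			a.run_after.update(outs[k])

obj = object()                       # shared "file node"
compile_t = _Tsk(outputs=[obj])
link_t = _Tsk(inputs=[obj])
_set_file_constraints([compile_t, link_t])
assert compile_t in link_t.run_after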
def runnable_status(self):
	if getattr(self, 'mod_fortran_done', None):
		return super(fc, self).runnable_status()
	bld = self.generator.bld
	lst = get_fortran_tasks(self)
	for tsk in lst:
		tsk.mod_fortran_done = True
	for tsk in lst:
		ret = tsk.runnable_status()
		if ret == Task.ASK_LATER:
			for x in lst:
				x.mod_fortran_done = None
			return Task.ASK_LATER
	ins = Utils.defaultdict(set)
	outs = Utils.defaultdict(set)
	for tsk in lst:
		key = tsk.uid()
		for x in bld.raw_deps[key]:
			if x.startswith('MOD@'):
				name = bld.modfile(x.replace('MOD@', ''))
				node = bld.srcnode.find_or_declare(name)
				tsk.set_outputs(node)
				outs[id(node)].add(tsk)
	for tsk in lst:
		key = tsk.uid()
		for x in bld.raw_deps[key]:
			if x.startswith('USE@'):
				name = bld.modfile(x.replace('USE@', ''))
				node = bld.srcnode.find_resource(name)
				if node and node not in tsk.outputs:
					if not node in bld.node_deps[key]:
						bld.node_deps[key].append(node)
					ins[id(node)].add(tsk)
	for k in ins.keys():
		for a in ins[k]:
			a.run_after.update(outs[k])
			tmp = []
			for t in outs[k]:
				tmp.extend(t.outputs)
			a.dep_nodes.extend(tmp)
			try:
				a.dep_nodes.sort(key=lambda x: x.abspath())
			except TypeError:
				# Python < 2.4 fallback: list.sort took a cmp function, not a key
				a.dep_nodes.sort(lambda x, y: cmp(x.abspath(), y.abspath()))
	for tsk in lst:
		try:
			delattr(tsk, 'cache_sig')
		except AttributeError:
			pass
	return super(fc, self).runnable_status()
def set_precedence_constraints(tasks): "adds tasks to the task 'run_after' attribute based on the after/before/ext_out/ext_in attributes" cstr_groups = Utils.defaultdict(list) for x in tasks: h = x.hash_constraints() cstr_groups[h].append(x) keys = list(cstr_groups.keys()) maxi = len(keys) # this list should be short for i in range(maxi): t1 = cstr_groups[keys[i]][0] for j in range(i + 1, maxi): t2 = cstr_groups[keys[j]][0] # add the constraints based on the comparisons if is_before(t1, t2): a = i b = j elif is_before(t2, t1): a = j b = i else: continue for x in cstr_groups[keys[b]]: x.run_after.update(cstr_groups[keys[a]])
def set_precedence_constraints(tasks): """ Add tasks to the task 'run_after' attribute based on the after/before/ext_out/ext_in attributes :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.TaskBase` """ cstr_groups = Utils.defaultdict(list) for x in tasks: h = x.hash_constraints() cstr_groups[h].append(x) keys = list(cstr_groups.keys()) maxi = len(keys) # this list should be short for i in range(maxi): t1 = cstr_groups[keys[i]][0] for j in range(i + 1, maxi): t2 = cstr_groups[keys[j]][0] # add the constraints based on the comparisons if is_before(t1, t2): a = i b = j elif is_before(t2, t1): a = j b = i else: continue aval = set(cstr_groups[keys[a]]) for x in cstr_groups[keys[b]]: x.run_after.update(aval)
def set_precedence_constraints(tasks): """ Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.TaskBase` """ cstr_groups = Utils.defaultdict(list) for x in tasks: h = x.hash_constraints() cstr_groups[h].append(x) keys = list(cstr_groups.keys()) maxi = len(keys) # this list should be short for i in range(maxi): t1 = cstr_groups[keys[i]][0] for j in range(i + 1, maxi): t2 = cstr_groups[keys[j]][0] # add the constraints based on the comparisons if is_before(t1, t2): a = i b = j elif is_before(t2, t1): a = j b = i else: continue aval = set(cstr_groups[keys[a]]) for x in cstr_groups[keys[b]]: x.run_after.update(aval)
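# Hedged sketch of the precedence pass above: tasks are grouped by identical
# constraint hashes so that the before/after comparison runs once per pair of
# groups rather than once per pair of tasks. The _Tsk2 class and _is_before
# helper are simplified assumptions, not the waf originals.
from collections import defaultdict

class _Tsk2(object):
	def __init__(self, name, before=(), after=()):
		self.name = name
		self.before, self.after = set(before), set(after)
		self.run_after = set()
	def hash_constraints(self):
		return (frozenset(self.before), frozenset(self.after), self.name)

def _is_before(t1, t2):
	return t2.name in t1.before or t1.name in t2.after

def _set_precedence_constraints(tasks):
	groups = defaultdict(list)
	for x in tasks:
		groups[x.hash_constraints()].append(x)
	keys = list(groups)
	for i in range(len(keys)):
		for j in range(i + 1, len(keys)):
			t1, t2 = groups[keys[i]][0], groups[keys[j]][0]
			if _is_before(t1, t2):
				a, b = keys[i], keys[j]
			elif _is_before(t2, t1):
				a, b = keys[j], keys[i]
			else:
				continue
			for x in groups[b]:
				x.run_after.update(groups[a])

cxx = [_Tsk2('cxx', before=('cxxprogram',)) for _ in range(3)]
link = [_Tsk2('cxxprogram')]
_set_precedence_constraints(cxx + link)
assert all(t in link[0].run_after for t in cxx)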
def __init__(self, **kw):
	super(BuildContext, self).__init__(**kw)
	self.is_install = 0
	self.top_dir = kw.get('top_dir', Context.top_dir)
	self.out_dir = kw.get('out_dir', Context.out_dir)
	self.run_dir = kw.get('run_dir', Context.run_dir)
	self.launch_dir = Context.launch_dir
	self.post_mode = POST_LAZY
	self.cache_dir = kw.get('cache_dir')
	if not self.cache_dir:
		self.cache_dir = os.path.join(self.out_dir, CACHE_DIR)
	self.all_envs = {}
	self.node_sigs = {}
	self.task_sigs = {}
	self.imp_sigs = {}
	self.node_deps = {}
	self.raw_deps = {}
	self.task_gen_cache_names = {}
	self.jobs = Options.options.jobs
	self.targets = Options.options.targets
	self.keep = Options.options.keep
	self.progress_bar = Options.options.progress_bar
	self.deps_man = Utils.defaultdict(list)
	self.current_group = 0
	self.groups = []
	self.group_names = {}
	for v in SAVED_ATTRS:
		if not hasattr(self, v):
			setattr(self, v, {})
def __init__(self, **kw): super(BuildContext, self).__init__(**kw) self.is_install = 0 self.top_dir = kw.get("top_dir", Context.top_dir) self.run_dir = kw.get("run_dir", Context.run_dir) self.post_mode = POST_AT_ONCE self.out_dir = kw.get("out_dir", Context.out_dir) self.cache_dir = kw.get("cache_dir", None) if not self.cache_dir: self.cache_dir = self.out_dir + os.sep + CACHE_DIR self.all_envs = {} self.task_sigs = {} self.node_deps = {} self.raw_deps = {} self.cache_dir_contents = {} self.task_gen_cache_names = {} self.launch_dir = Context.launch_dir self.jobs = Options.options.jobs self.targets = Options.options.targets self.keep = Options.options.keep self.cache_global = Options.cache_global self.nocache = Options.options.nocache self.progress_bar = Options.options.progress_bar self.deps_man = Utils.defaultdict(list) self.current_group = 0 self.groups = [] self.group_names = {}
def set_precedence_constraints(tasks):
	cstr_groups = Utils.defaultdict(list)
	for x in tasks:
		x.run_after = SetOfTasks(x)
		x.run_after_groups = []
		x.waiting_sets = []

		h = x.hash_constraints()
		cstr_groups[h].append(x)

	# create sets which can be reused for all tasks
	for k in cstr_groups.keys():
		cstr_groups[k] = set(cstr_groups[k])

	# this list should be short
	for key1, key2 in itertools.combinations(cstr_groups.keys(), 2):
		group1 = cstr_groups[key1]
		group2 = cstr_groups[key2]

		# get the first entry of the set
		t1 = next(iter(group1))
		t2 = next(iter(group2))

		# add the constraints based on the comparisons
		if Task.is_before(t1, t2):
			for x in group2:
				x.run_after_groups.append(group1)
			for k in group1:
				k.waiting_sets.append(group1)
		elif Task.is_before(t2, t1):
			for x in group1:
				x.run_after_groups.append(group2)
			for k in group2:
				k.waiting_sets.append(group2)
def set_precedence_constraints(tasks):
	cstr_groups = Utils.defaultdict(list)
	for x in tasks:
		h = x.hash_constraints()
		cstr_groups[h].append(x)

	keys = list(cstr_groups.keys())
	maxi = len(keys)

	for i in range(maxi):
		t1 = cstr_groups[keys[i]][0]
		for j in range(i + 1, maxi):
			t2 = cstr_groups[keys[j]][0]

			if is_before(t1, t2):
				a = i
				b = j
			elif is_before(t2, t1):
				a = j
				b = i
			else:
				continue

			a = cstr_groups[keys[a]]
			b = cstr_groups[keys[b]]

			if len(a) < 2 or len(b) < 2:
				for x in b:
					x.run_after.update(a)
			else:
				group = TaskGroup(set(a), set(b))
				for x in b:
					x.run_after.add(group)
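# Hedged illustration of why the TaskGroup branch above exists: linking every
# task in `b` after every task in `a` creates len(a) * len(b) run_after edges,
# while one shared group object needs only len(a) + len(b) references. The
# _TaskGroup class below is a simplified stand-in, not waf's implementation.
class _TaskGroup(object):
	def __init__(self, prev, next_):
		self.prev = prev     # tasks that must run first
		self.next = next_    # tasks that wait on the group
		self.done = False

a = [object() for _ in range(10)]
b = [object() for _ in range(10)]
naive_edges = len(a) * len(b)                      # 100 direct links
group = _TaskGroup(set(a), set(b))
grouped_refs = len(group.prev) + len(group.next)   # 20 references via one object
assert grouped_refs < naive_edges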
def __init__(self, **kw):
	super(BuildContext, self).__init__(**kw)
	self.is_install = 0
	self.top_dir = kw.get('top_dir', Context.top_dir)
	self.run_dir = kw.get('run_dir', Context.run_dir)
	self.post_mode = POST_AT_ONCE
	self.out_dir = kw.get('out_dir', Context.out_dir)
	self.cache_dir = kw.get('cache_dir', None)
	if not self.cache_dir:
		self.cache_dir = os.path.join(self.out_dir, CACHE_DIR)
	self.all_envs = {}
	self.task_sigs = {}
	self.node_deps = {}
	self.raw_deps = {}
	self.cache_dir_contents = {}
	self.task_gen_cache_names = {}
	self.launch_dir = Context.launch_dir
	self.jobs = Options.options.jobs
	self.targets = Options.options.targets
	self.keep = Options.options.keep
	self.progress_bar = Options.options.progress_bar
	self.deps_man = Utils.defaultdict(list)
	self.current_group = 0
	self.groups = []
	self.group_names = {}
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			Logs.error('* Node %r is created by more than one task. The task generators are:' % k)
			for x in v:
				Logs.error(' %d. %r' % (1 + v.index(x), x.generator))

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://waf.googlecode.com/svn/docs/apidocs/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def __init__(self, bld, j=2):
	"""
	The initialization requires a build context reference
	for computing the total number of jobs.
	"""
	self.numjobs = j
	"""
	Amount of parallel consumers to use
	"""
	self.bld = bld
	"""
	Instance of :py:class:`waflib.Build.BuildContext`
	"""
	self.outstanding = PriorityTasks()
	"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
	self.postponed = PriorityTasks()
	"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
	self.incomplete = set()
	"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
	self.ready = PriorityQueue(0)
	"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
	self.out = Queue(0)
	"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""
	self.count = 0
	"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
	self.processed = 0
	"""Amount of tasks processed"""
	self.stop = False
	"""Error flag to stop the build"""
	self.error = []
	"""Tasks that could not be executed"""
	self.biter = None
	"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
	self.dirty = False
	"""
	Flag that indicates that the build cache must be saved when a task was executed
	(calls :py:meth:`waflib.Build.BuildContext.store`)
	"""
	self.revdeps = Utils.defaultdict(set)
	"""
	The reverse dependency graph of dependencies obtained from Task.run_after
	"""
	self.spawner = Spawner(self)
	"""
	Coordinating daemon thread that spawns thread consumers
	"""
def __init__(self, bld, j=2):
	"""
	The initialization requires a build context reference
	for computing the total number of jobs.
	"""
	self.numjobs = j
	"""
	Amount of parallel consumers to use
	"""
	self.bld = bld
	"""
	Instance of :py:class:`waflib.Build.BuildContext`
	"""
	self.outstanding = PriorityTasks()
	"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
	self.postponed = PriorityTasks()
	"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
	self.incomplete = set()
	"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
	self.ready = Queue(0)
	"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
	self.out = Queue(0)
	"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""
	self.count = 0
	"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
	self.processed = 0
	"""Amount of tasks processed"""
	self.stop = False
	"""Error flag to stop the build"""
	self.error = []
	"""Tasks that could not be executed"""
	self.biter = None
	"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
	self.dirty = False
	"""
	Flag that indicates that the build cache must be saved when a task was executed
	(calls :py:meth:`waflib.Build.BuildContext.store`)
	"""
	self.revdeps = Utils.defaultdict(set)
	"""
	The reverse dependency graph of dependencies obtained from Task.run_after
	"""
	self.spawner = Spawner(self)
	"""
	Coordinating daemon thread that spawns thread consumers
	"""
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		if hasattr(tsk, 'no_errcheck_out'):
			return

		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (
				k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			for x in v:
				if Logs.verbose > 1:
					Logs.error(' %d. %r', 1 + v.index(x), x.generator)
				else:
					Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
				for tsk in v:
					# compute the generator details per conflicting task
					tg_details = tsk.generator.name
					if Logs.verbose > 2:
						tg_details = tsk.generator
					Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
def set_file_constraints(tasks): """ Adds tasks to the task 'run_after' attribute based on the task inputs and outputs :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.TaskBase` """ ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) for x in tasks: for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []): ins[id(a)].add(x) for a in getattr(x, 'outputs', []): outs[id(a)].add(x) links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k])
def set_file_constraints(tasks): """ Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.TaskBase` """ ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) for x in tasks: for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []): ins[id(a)].add(x) for a in getattr(x, 'outputs', []): outs[id(a)].add(x) links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k])
def __init__(self, *k, **kw): """ The task generator objects predefine various attributes (source, target) for possible processing by process_rule (make-like rules) or process_source (extensions, misc methods) The tasks are stored on the attribute 'tasks'. They are created by calling methods listed in self.meths *or* referenced in the attribute features A topological sort is performed to ease the method re-use. The extra key/value elements passed in kw are set as attributes """ # so we will have to play with directed acyclic graphs # detect cycles, etc self.source = '' self.target = '' self.meths = [] """ List of method names to execute (it is usually a good idea to avoid touching this) """ self.prec = Utils.defaultdict(list) """ Precedence table for sorting the methods in self.meths """ self.mappings = {} """ List of mappings {extension -> function} for processing files by extension """ self.features = [] """ List of feature names for bringing new methods in """ self.tasks = [] """ List of tasks created. """ if not 'bld' in kw: # task generators without a build context :-/ self.env = ConfigSet.ConfigSet() self.idx = 0 self.path = None else: self.bld = kw['bld'] self.env = self.bld.env.derive() self.path = self.bld.path # emulate chdir when reading scripts for key, val in kw.items(): setattr(self, key, val)
def set_file_constraints(tasks): """ Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.Task` """ ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) for x in tasks: for a in x.inputs: ins[a].add(x) for a in x.dep_nodes: ins[a].add(x) for a in x.outputs: outs[a].add(x) links = set(ins.keys()).intersection(outs.keys()) for k in links: for a in ins[k]: a.run_after.update(outs[k])
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return

		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in list(mp.items()):
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (
				k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			for x in v:
				if Logs.verbose > 1:
					Logs.error(' %d. %r' % (1 + v.index(x), x.generator))
				else:
					Logs.error(' %d. %r in %r' % (1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None)))

	if not dupe:
		for (k, v) in list(uids.items()):
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = "* Node %r is created more than once%s. The task generators are:" % (
				k,
				Logs.verbose == 1 and " (full message on 'waf -v -v')" or "",
			)
			Logs.error(msg)
			for x in v:
				if Logs.verbose > 1:
					Logs.error(" %d. %r" % (1 + v.index(x), x.generator))
				else:
					Logs.error(
						" %d. %r in %r"
						% (1 + v.index(x), x.generator.name, getattr(x.generator, "path", None))
					)
	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error(
					"* Several tasks use the same identifier. Please check the information on\n http://waf.googlecode.com/svn/docs/apidocs/Task.html#waflib.Task.Task.uid"
				)
				for tsk in v:
					Logs.error(" - object %r (%r) defined in %r" % (tsk.__class__.__name__, tsk, tsk.generator))
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					for node in tsk.outputs:
						mp[node].append(tsk)
			except AttributeError:
				pass
	for (k, v) in mp.items():
		if len(v) > 1:
			Logs.error('* Node %r is created by more than one task. The task generators are:' % k)
			for x in v:
				Logs.error(' %d. %r' % (1 + v.index(x), x.generator))
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		if hasattr(tsk, 'no_errcheck_out'):
			return

		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				# raised if not a task generator, which should be uncommon
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
			Logs.error(msg)
			for x in v:
				if Logs.verbose > 1:
					Logs.error(' %d. %r', 1 + v.index(x), x.generator)
				else:
					Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
			Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
				for tsk in v:
					# compute the generator details per conflicting task
					tg_details = tsk.generator.name
					if Logs.verbose > 2:
						tg_details = tsk.generator
					Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
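# Minimal sketch of the duplicate-output check above: group tasks by output
# node and flag any node claimed by more than one task. The _T class is a
# mock assumption standing in for waf tasks, and plain strings stand in for
# nodes (anything hashable works as a dict key here).
from collections import defaultdict

class _T(object):
	def __init__(self, outputs):
		self.outputs = outputs

def _find_duplicate_outputs(tasks):
	mp = defaultdict(list)
	for tsk in tasks:
		for node in tsk.outputs:
			mp[node].append(tsk)
	# keep only the nodes created by more than one task
	return {node: owners for node, owners in mp.items() if len(owners) > 1}

dupes = _find_duplicate_outputs([_T(['a.o']), _T(['a.o']), _T(['b.o'])])
assert list(dupes) == ['a.o'] and len(dupes['a.o']) == 2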
def __init__(self, bld, j=2):
	self.numjobs = j
	self.bld = bld
	self.outstanding = PriorityTasks()
	self.postponed = PriorityTasks()
	self.incomplete = set()
	self.ready = Queue(0)
	self.out = Queue(0)
	self.count = 0
	self.processed = 0
	self.stop = False
	self.error = []
	self.biter = None
	self.dirty = False
	self.revdeps = Utils.defaultdict(set)
	self.spawner = Spawner(self)
def __init__(self, *k, **kw):
	super(BuildContext, self).__init__(kw.get('start', None))

	self.top_dir = kw.get('top_dir', Context.top_dir)
	self.run_dir = kw.get('run_dir', Context.run_dir)

	self.post_mode = POST_AT_ONCE
	"""post the task generators at once, group-by-group, or both"""

	# output directory - may be set until the nodes are considered
	self.out_dir = kw.get('out_dir', Context.out_dir)

	self.cache_dir = kw.get('cache_dir', None)
	if not self.cache_dir:
		self.cache_dir = self.out_dir + os.sep + CACHE_DIR

	# map names to environments, the 'default' must be defined
	self.all_envs = {}

	# ======================================= #
	# cache variables

	for v in 'task_sigs node_deps raw_deps'.split():
		setattr(self, v, {})

	# list of folders that are already scanned
	# so that we do not need to stat them one more time
	self.cache_dir_contents = {}

	self.task_gen_cache_names = {}

	self.targets = Options.options.targets
	self.launch_dir = Context.launch_dir

	############ stuff below has not been reviewed

	# Manual dependencies.
	self.deps_man = Utils.defaultdict(list)

	# just the structure here
	self.current_group = 0
	self.groups = []
	self.group_names = {}
def __init__(self, *kw, **kwargs):
	# so we will have to play with directed acyclic graphs
	# detect cycles, etc
	self.source = ''
	self.target = ''

	# list of methods to execute (it is usually a good idea to avoid touching this)
	self.meths = []

	# precedence table for sorting the methods
	self.prec = Utils.defaultdict(list)

	# list of mappings extension -> function
	self.mappings = {}

	# list of methods to execute (by name)
	self.features = []

	self.tasks = []

	for key, val in kwargs.items():
		setattr(self, key, val)

	try:
		bld = self.bld
	except AttributeError:
		self.env = ConfigSet.ConfigSet()
		self.idx = 0
		self.path = None
	else:
		self.env = self.bld.env.derive()
		self.path = self.bld.path  # emulate chdir when reading scripts

		# provide a unique id
		try:
			self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
		except AttributeError:
			self.bld.idx = {}
			self.idx = self.bld.idx[id(self.path)] = 0
def set_precedence_constraints(tasks): """ Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes :param tasks: tasks :type tasks: list of :py:class:`waflib.Task.Task` """ cstr_groups = Utils.defaultdict(list) for x in tasks: h = x.hash_constraints() cstr_groups[h].append(x) keys = list(cstr_groups.keys()) maxi = len(keys) # this list should be short for i in range(maxi): t1 = cstr_groups[keys[i]][0] for j in range(i + 1, maxi): t2 = cstr_groups[keys[j]][0] # add the constraints based on the comparisons if is_before(t1, t2): a = i b = j elif is_before(t2, t1): a = j b = i else: continue a = cstr_groups[keys[a]] b = cstr_groups[keys[b]] if len(a) < 2 or len(b) < 2: for x in b: x.run_after.update(a) else: group = TaskGroup(set(a), set(b)) for x in b: x.run_after.add(group)
def set_precedence_constraints(tasks):
	cstr_groups = Utils.defaultdict(list)
	for x in tasks:
		h = x.hash_constraints()
		cstr_groups[h].append(x)

	keys = list(cstr_groups.keys())
	maxi = len(keys)

	for i in range(maxi):
		t1 = cstr_groups[keys[i]][0]
		for j in range(i + 1, maxi):
			t2 = cstr_groups[keys[j]][0]

			if is_before(t1, t2):
				a = i
				b = j
			elif is_before(t2, t1):
				a = j
				b = i
			else:
				continue

			for x in cstr_groups[keys[b]]:
				x.run_after.update(cstr_groups[keys[a]])
def check_same_targets(self):
	mp = Utils.defaultdict(list)
	uids = {}

	def check_task(tsk):
		if not isinstance(tsk, Task.Task):
			return
		for node in tsk.outputs:
			mp[node].append(tsk)
		try:
			uids[tsk.uid()].append(tsk)
		except KeyError:
			uids[tsk.uid()] = [tsk]

	for g in self.groups:
		for tg in g:
			try:
				for tsk in tg.tasks:
					check_task(tsk)
			except AttributeError:
				check_task(tg)

	dupe = False
	for (k, v) in mp.items():
		if len(v) > 1:
			dupe = True
			Logs.error('* Node %r is created by more than one task. The task generators are:' % k)
			for x in v:
				Logs.error(' %d. %r' % (1 + v.index(x), x.generator))

	if not dupe:
		for (k, v) in uids.items():
			if len(v) > 1:
				Logs.error('* Several tasks use the same identifier. Please check the information on\n http://waf.googlecode.com/svn/docs/apidocs/Task.html#waflib.Task.Task.uid')
				for tsk in v:
					Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def __init__(self, *k, **kw):
	self.source = ''
	self.target = ''
	self.meths = []
	self.prec = Utils.defaultdict(list)
	self.mappings = {}
	self.features = []
	self.tasks = []
	if not 'bld' in kw:
		self.env = ConfigSet.ConfigSet()
		self.idx = 0
		self.path = None
	else:
		self.bld = kw['bld']
		self.env = self.bld.env.derive()
		self.path = self.bld.path
		try:
			self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
		except AttributeError:
			self.bld.idx = {}
			self.idx = self.bld.idx[id(self.path)] = 0
	for key, val in kw.items():
		setattr(self, key, val)
def __init__(self, **kw):
	super(BuildContext, self).__init__(**kw)
	self.top_dir = kw.get('top_dir', Context.top_dir)
	self.run_dir = kw.get('run_dir', Context.run_dir)
	self.post_mode = POST_AT_ONCE
	self.out_dir = kw.get('out_dir', Context.out_dir)
	self.cache_dir = kw.get('cache_dir', None)
	if not self.cache_dir:
		self.cache_dir = self.out_dir + os.sep + CACHE_DIR
	self.all_envs = {}
	for v in 'task_sigs node_deps raw_deps'.split():
		setattr(self, v, {})
	self.cache_dir_contents = {}
	self.task_gen_cache_names = {}
	self.launch_dir = Context.launch_dir
	self.targets = Options.options.targets
	self.keep = Options.options.keep
	self.cache_global = Options.cache_global
	self.nocache = Options.options.nocache
	self.progress_bar = Options.options.progress_bar
	self.deps_man = Utils.defaultdict(list)
	self.current_group = 0
	self.groups = []
	self.group_names = {}
# Thomas Nagy, 2005-2010 (ita)

"""
Task generators

The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is always postponed. To achieve this, various methods are called from the method "apply"
"""

import copy, re
from waflib import Task, Utils, Logs, Errors, ConfigSet

feats = Utils.defaultdict(set)
"""remember the methods declaring features"""


class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""
class task_gen(object):
	mappings = {}
	prec = Utils.defaultdict(list)

	def __init__(self, *k, **kw):
		self.source = ''
		self.target = ''
		self.meths = []
		self.prec = Utils.defaultdict(list)
		self.mappings = {}
		self.features = []
		self.tasks = []
		if not 'bld' in kw:
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path
			try:
				self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
			except AttributeError:
				self.bld.idx = {}
				self.idx = self.bld.idx[id(self.path)] = 0
		for key, val in kw.items():
			setattr(self, key, val)

	def __str__(self):
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

	def __repr__(self):
		lst = []
		for x in self.__dict__.keys():
			if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
				# %r already calls repr(); do not apply repr() twice
				lst.append("%s=%r" % (x, getattr(self, x)))
		return "bld(%s) in %s" % (" ".join(lst), self.path.abspath())

	def get_name(self):
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name

	def set_name(self, name):
		self._name = name

	name = property(get_name, set_name)

	def to_list(self, value):
		if isinstance(value, str):
			return value.split()
		else:
			return value

	def post(self):
		if getattr(self, 'posted', None):
			return False
		self.posted = True

		keys = set(self.meths)
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = feats[x]
			if not st:
				Logs.warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(st)

		# copy the precedence table
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected from the precedence graph
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x:
					break
			else:
				tmp.append(a)

		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys:
				out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		if prec:
			raise Errors.WafError('Cycle detected in the method execution %r' % prec)
		out.reverse()
		self.meths = out

		Logs.debug('task_gen: posting %s %d' % (self, id(self)))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
			v()
		Logs.debug('task_gen: posted %s' % self.name)
		return True

	def get_hook(self, node):
		name = node.name
		for k in self.mappings:
			if name.endswith(k):
				return self.mappings[k]
		for k in task_gen.mappings:
			if name.endswith(k):
				return task_gen.mappings[k]
		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)" % (node, task_gen.mappings.keys()))

	def create_task(self, name, src=None, tgt=None):
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task

	def clone(self, env):
		newobj = self.bld()
		for x in self.__dict__:
			if x in ['env', 'bld']:
				continue
			elif x in ['path', 'features']:
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))
		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()
		return newobj
#! /usr/bin/env python
# encoding: utf-8
# WARNING! All changes made to this file will be lost!

import sys
if sys.hexversion < 0x020400f0:
	from sets import Set as set
import copy, re
from waflib import Task, Utils, Logs, Errors, ConfigSet

feats = Utils.defaultdict(set)


class task_gen(object):
	mappings = {}
	prec = Utils.defaultdict(list)

	def __init__(self, *k, **kw):
		self.source = ''
		self.target = ''
		self.meths = []
		self.prec = Utils.defaultdict(list)
		self.mappings = {}
		self.features = []
		self.tasks = []
		if not 'bld' in kw:
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path
def compute_needed_tgs(self):
	# assume the 'use' keys are not modified during the build phase
	dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
	Logs.debug('rev_use: Loading %s', dbfn)
	try:
		data = Utils.readf(dbfn, 'rb')
	except (EnvironmentError, EOFError):
		Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
		self.f_deps = {}
		self.f_tstamps = {}
	else:
		try:
			self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
		except Exception as e:
			Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
			self.f_deps = {}
			self.f_tstamps = {}
		else:
			Logs.debug('rev_use: Loaded %s', dbfn)

	# 1. obtain task generators that contain rebuilds
	# 2. obtain the 'use' graph and its dual
	stales = set()
	reverse_use_map = Utils.defaultdict(list)
	use_map = Utils.defaultdict(list)

	for g in self.groups:
		for tg in g:
			if tg.is_stale():
				stales.add(tg)

			try:
				lst = tg.use = Utils.to_list(tg.use)
			except AttributeError:
				pass
			else:
				for x in lst:
					try:
						xtg = self.get_tgen_by_name(x)
					except Errors.WafError:
						pass
					else:
						use_map[tg].append(xtg)
						reverse_use_map[xtg].append(tg)

	Logs.debug('rev_use: found %r stale tgs', len(stales))

	# 3. dfs to post downstream tg as stale
	visited = set()
	def mark_down(tg):
		if tg in visited:
			return
		visited.add(tg)
		Logs.debug('rev_use: marking down %r as stale', tg.name)
		tg.staleness = DIRTY
		for x in reverse_use_map[tg]:
			mark_down(x)
	for tg in stales:
		mark_down(tg)

	# 4. dfs to find ancestors tg to mark as needed
	self.needed_tgs = needed_tgs = set()
	def mark_needed(tg):
		if tg in needed_tgs:
			return
		needed_tgs.add(tg)
		if tg.staleness == DONE:
			Logs.debug('rev_use: marking up %r as needed', tg.name)
			tg.staleness = NEEDED
		for x in use_map[tg]:
			mark_needed(x)
	for xx in visited:
		mark_needed(xx)

	# so we have the whole tg trees to post in the set "needed"
	# load their build trees
	for tg in needed_tgs:
		tg.bld.restore()
		tg.bld.fix_tg_path(tg)

	# the stale ones should be fully built, while the needed ones
	# may skip a few tasks, see create_compiled_task and apply_link_after below
	Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
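# Toy version of the two traversals in compute_needed_tgs above: mark
# everything downstream of a stale generator dirty, then mark ancestors of
# the dirty set as needed. The graph below is a made-up example (strings
# standing in for task generators), not waf data.
from collections import defaultdict

use_map = {'app': ['lib'], 'lib': ['core'], 'core': []}
reverse_use_map = defaultdict(list)
for tg, deps in use_map.items():
	for d in deps:
		reverse_use_map[d].append(tg)

stales, visited = {'core'}, set()
def mark_down(tg):
	# dirty propagation along reverse 'use' edges
	if tg in visited:
		return
	visited.add(tg)
	for x in reverse_use_map[tg]:
		mark_down(x)
for tg in stales:
	mark_down(tg)

needed = set()
def mark_needed(tg):
	# ancestors required to rebuild the dirty generators
	if tg in needed:
		return
	needed.add(tg)
	for x in use_map[tg]:
		mark_needed(x)
for x in visited:
	mark_needed(x)

assert needed == {'app', 'lib', 'core'}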
def runnable_status(self): """ set the mod file outputs and the dependencies on the mod files over all the fortran tasks there are no concurrency issues since the method runnable_status is executed by the main thread """ if getattr(self, 'mod_fortran_done', None): return super(fc, self).runnable_status() # now, if we reach this part it is because this fortran task is the first in the list bld = self.generator.bld # obtain the fortran tasks lst = [tsk for tsk in bld.producer.outstanding + bld.producer.frozen if isinstance(tsk, fc)] # disable this method for other tasks for tsk in lst: tsk.mod_fortran_done = True # wait for all the .f tasks to be ready for execution # and ensure that the scanners are called at least once for tsk in lst: ret = tsk.runnable_status() if ret == Task.ASK_LATER: # we have to wait for one of the other fortran tasks to be ready # this may deadlock if there are dependencies between the fortran tasks # but this should not happen (we are setting them here!) for x in lst: x.mod_fortran_done = None # TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end return Task.ASK_LATER ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) # the .mod files to create for tsk in lst: key = tsk.unique_id() for x in bld.raw_deps[key]: if x.startswith('MOD@'): name = x.replace('MOD@', '') + '.mod' node = bld.srcnode.find_or_declare(name) tsk.set_outputs(node) outs[id(node)].add(tsk) # the .mod files to use for tsk in lst: key = tsk.unique_id() for x in bld.raw_deps[key]: if x.startswith('USE@'): name = x.replace('USE@', '') + '.mod' node = bld.srcnode.find_resource(name) if node: if not node in bld.node_deps[key]: bld.node_deps[key].append(node) ins[id(node)].add(tsk) # if the intersection matches, set the order for k in ins.keys(): for a in ins[k]: a.run_after.update(outs[k]) # the task objects have changed: clear the signature cache for tsk in lst: try: delattr(tsk, 'cache_sig') except AttributeError: pass return super(fc, self).runnable_status()
#! /usr/bin/env python
# encoding: utf-8
# WARNING! All changes made to this file will be lost!

import sys
if sys.hexversion < 0x020400f0:
	from sets import Set as set
import os, sys, re
from waflib import TaskGen, Task, Utils, Logs, Build, Options, Node, Errors
from waflib.Logs import error, debug, warn
from waflib.TaskGen import after_method, before_method, feature, taskgen_method
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf

USELIB_VARS = Utils.defaultdict(set)
USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS'])
USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS'])
USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH'])
USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH'])
USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])
USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
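# Sketch of how a USELIB_VARS table like the one above is typically consumed:
# the variable names registered for each active feature are merged, and each
# variable is then read from the build environment. This loop is illustrative
# only; in waf the real logic lives in the propagate_uselib_vars method.
feature_names = ['cxx', 'cxxprogram']
_vars = set()
for x in feature_names:
	_vars |= USELIB_VARS[x]
assert 'CXXFLAGS' in _vars and 'LINKFLAGS' in _vars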
def __init__(self, **kw): super(BuildContext, self).__init__(**kw) self.is_install = 0 """Non-zero value when installing or uninstalling file""" self.top_dir = kw.get('top_dir', Context.top_dir) """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`""" self.out_dir = kw.get('out_dir', Context.out_dir) """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`""" self.run_dir = kw.get('run_dir', Context.run_dir) """See :py:attr:`waflib.Context.run_dir`""" self.launch_dir = Context.launch_dir """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`""" self.post_mode = POST_LAZY """Whether to post the task generators at once or group-by-group (default is group-by-group)""" self.cache_dir = kw.get('cache_dir') if not self.cache_dir: self.cache_dir = os.path.join(self.out_dir, CACHE_DIR) self.all_envs = {} """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment""" # ======================================= # # cache variables self.node_sigs = {} """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)""" self.task_sigs = {} """Dict mapping task identifiers (uid) to task signatures (persists across builds)""" self.imp_sigs = {} """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)""" self.node_deps = {} """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" self.raw_deps = {} """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)""" self.task_gen_cache_names = {} self.jobs = Options.options.jobs """Amount of jobs to run in parallel""" self.targets = Options.options.targets """List of targets to build (default: \*)""" self.keep = Options.options.keep """Whether the build should continue past errors""" self.progress_bar = Options.options.progress_bar """ Level of progress status: 0. normal output 1. progress bar 2. IDE output 3. No output at all """ # Manual dependencies. self.deps_man = Utils.defaultdict(list) """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`""" # just the structure here self.current_group = 0 """ Current build group """ self.groups = [] """ List containing lists of task generators """ self.group_names = {} """ Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group` """ for v in SAVED_ATTRS: if not hasattr(self, v): setattr(self, v, {})
def runnable_status(self): """ Set the mod file outputs and the dependencies on the mod files over all the fortran tasks executed by the main thread so there are no concurrency issues """ if getattr(self, "mod_fortran_done", None): return super(fc, self).runnable_status() # now, if we reach this part it is because this fortran task is the first in the list bld = self.generator.bld # obtain the fortran tasks lst = get_fortran_tasks(self) # disable this method for other tasks for tsk in lst: tsk.mod_fortran_done = True # wait for all the .f tasks to be ready for execution # and ensure that the scanners are called at least once for tsk in lst: ret = tsk.runnable_status() if ret == Task.ASK_LATER: # we have to wait for one of the other fortran tasks to be ready # this may deadlock if there are dependencies between the fortran tasks # but this should not happen (we are setting them here!) for x in lst: x.mod_fortran_done = None # TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end return Task.ASK_LATER ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) # the .mod files to create for tsk in lst: key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith("MOD@"): name = bld.modfile(x.replace("MOD@", "")) node = bld.srcnode.find_or_declare(name) if not getattr(node, "sig", None): node.sig = Utils.SIG_NIL tsk.set_outputs(node) outs[id(node)].add(tsk) # the .mod files to use for tsk in lst: key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith("USE@"): name = bld.modfile(x.replace("USE@", "")) node = bld.srcnode.find_resource(name) if node and node not in tsk.outputs: if not node in bld.node_deps[key]: bld.node_deps[key].append(node) ins[id(node)].add(tsk) # if the intersection matches, set the order for k in ins.keys(): for a in ins[k]: a.run_after.update(outs[k]) # the scanner cannot output nodes, so we have to set them # ourselves as task.dep_nodes (additional input nodes) tmp = [] for t in outs[k]: tmp.extend(t.outputs) a.dep_nodes.extend(tmp) a.dep_nodes.sort(key=lambda x: x.abspath()) # the task objects have changed: clear the signature cache for tsk in lst: try: delattr(tsk, "cache_sig") except AttributeError: pass return super(fc, self).runnable_status()
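# Hedged illustration of the MOD@/USE@ bookkeeping above: scanner results are
# plain strings, and module names are mapped to .mod file names. The modfile
# helper below is an assumption mirroring common compiler behaviour
# (lowercase module name plus '.mod'); waf's bld.modfile may differ.
def modfile(name):
	return name.lower() + '.mod'

raw_deps = {
	't1': ['MOD@FOO'],   # t1 provides module FOO
	't2': ['USE@FOO'],   # t2 consumes module FOO
}
provides = {modfile(x[4:]): k for k, v in raw_deps.items() for x in v if x.startswith('MOD@')}
uses = {modfile(x[4:]): k for k, v in raw_deps.items() for x in v if x.startswith('USE@')}
# both refer to foo.mod, so t2 must be ordered after t1
assert provides['foo.mod'] == 't1' and uses['foo.mod'] == 't2'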
def __init__(self, **kw): super(BuildContext, self).__init__(**kw) self.is_install = 0 """Non-zero value when installing or uninstalling file""" self.top_dir = kw.get('top_dir', Context.top_dir) self.run_dir = kw.get('run_dir', Context.run_dir) self.post_mode = POST_AT_ONCE """post the task generators at once, group-by-group, or both""" # output directory - may be set until the nodes are considered self.out_dir = kw.get('out_dir', Context.out_dir) self.cache_dir = kw.get('cache_dir', None) if not self.cache_dir: self.cache_dir = self.out_dir + os.sep + CACHE_DIR # map names to environments, the '' must be defined self.all_envs = {} # ======================================= # # cache variables self.task_sigs = {} """Signatures of the tasks (persists between build executions)""" self.node_deps = {} """Dict of node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists between build executions)""" self.raw_deps = {} """Dict of custom data returned by :py:meth:`waflib.Task.Task.scan` (persists between build executions)""" # list of folders that are already scanned # so that we do not need to stat them one more time self.cache_dir_contents = {} self.task_gen_cache_names = {} self.launch_dir = Context.launch_dir self.jobs = Options.options.jobs self.targets = Options.options.targets self.keep = Options.options.keep self.cache_global = Options.cache_global self.nocache = Options.options.nocache self.progress_bar = Options.options.progress_bar ############ stuff below has not been reviewed # Manual dependencies. self.deps_man = Utils.defaultdict(list) """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`""" # just the structure here self.current_group = 0 """ Current build group """ self.groups = [] """ List containing lists of task generators """ self.group_names = {} """
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""

	mappings = {}
	prec = Utils.defaultdict(list)

	def __init__(self, *k, **kw):
		"""
		The task generator objects predefine various attributes (source, target) for possible
		processing by process_rule (make-like rules) or process_source (extensions, misc methods)

		The tasks are stored on the attribute 'tasks'. They are created by calling methods
		listed in self.meths *or* referenced in the attribute features
		A topological sort is performed to ease the method re-use.

		The extra key/value elements passed in kw are set as attributes
		"""
		# so we will have to play with directed acyclic graphs
		# detect cycles, etc
		self.source = ''
		self.target = ''

		self.meths = []
		"""
		List of method names to execute (it is usually a good idea to avoid touching this)
		"""

		self.prec = Utils.defaultdict(list)
		"""
		Precedence table for sorting the methods in self.meths
		"""

		self.mappings = {}
		"""
		List of mappings {extension -> function} for processing files by extension
		"""

		self.features = []
		"""
		List of feature names for bringing new methods in
		"""

		self.tasks = []
		"""
		List of tasks created.
		"""

		if not 'bld' in kw:
			# task generators without a build context :-/
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path  # emulate chdir when reading scripts

			# provide a unique id
			try:
				self.idx = self.bld.idx[id(self.path)] = self.bld.idx.get(id(self.path), 0) + 1
			except AttributeError:
				self.bld.idx = {}
				self.idx = self.bld.idx[id(self.path)] = 1

		for key, val in kw.items():
			setattr(self, key, val)

	def __str__(self):
		"""for debugging purposes"""
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

	def __repr__(self):
		"""for debugging purposes"""
		lst = []
		for x in self.__dict__.keys():
			if x not in ['env', 'bld', 'compiled_tasks', 'tasks']:
				lst.append("%s=%s" % (x, repr(getattr(self, x))))
		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

	def get_name(self):
		"""
		If not set, the name is computed from the target name::

			def build(bld):
				x = bld(name='foo')
				x.get_name() # foo
				y = bld(target='bar')
				y.get_name() # bar

		:rtype: string
		:return: name of this task generator
		"""
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name

	def set_name(self, name):
		self._name = name

	name = property(get_name, set_name)

	def to_list(self, val):
		"""
		Ensure that a parameter is a list

		:type val: string or list of string
		:param val: input to return as a list
		:rtype: list
		"""
		if isinstance(val, str):
			return val.split()
		else:
			return val

	def post(self):
		"""
		Create task objects. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or :py:attr:`waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
		"""
		# we could add a decorator to let the task run once, but then python 2.3 will be difficult to support
		if getattr(self, 'posted', None):
			#error("OBJECT ALREADY POSTED" + str( self))
			return False
		self.posted = True

		keys = set(self.meths)

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = feats[x]
			if not st:
				if not x in Task.classes:
					Logs.warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(list(st))  # ironpython 2.7 wants the cast to list

		# copy the precedence table
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x:
					break
			else:
				tmp.append(a)

		tmp.sort()

		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys:
				out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		if prec:
			raise Errors.WafError('Cycle detected in the method execution %r' % prec)
		out.reverse()
		self.meths = out

		# then we run the methods in order
		Logs.debug('task_gen: posting %s %d' % (self, id(self)))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)' % (x, id(self)))
			v()

		Logs.debug('task_gen: posted %s' % self.name)
		return True

	def get_hook(self, node):
		"""
		:param node: Input file to process
		:type node: :py:class:`waflib.Tools.Node.Node`
		:return: A method able to process the input node by looking at the extension
		:rtype: function
		"""
		name = node.name
		for k in self.mappings:
			if name.endswith(k):
				return self.mappings[k]
		for k in task_gen.mappings:
			if name.endswith(k):
				return task_gen.mappings[k]
		raise Errors.WafError("File %r has no mapping in %r (did you forget to load a waf tool?)" % (node, task_gen.mappings.keys()))

	def create_task(self, name, src=None, tgt=None):
		"""
		Wrapper for creating task instances. The classes are retrieved from the
		context class if possible, then from the global dict Task.classes.

		:param name: task class name
		:type name: string
		:param src: input nodes
		:type src: list of :py:class:`waflib.Tools.Node.Node`
		:param tgt: output nodes
		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
		:return: A task object
		:rtype: :py:class:`waflib.Task.TaskBase`
		"""
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task

	def clone(self, env):
		"""
		Make a copy of a task generator. Once the copy is made, it is necessary to ensure that it
		does not create the same output files as the original, or the same files may
		be compiled several times.

		:param env: A configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:return: A copy
		:rtype: :py:class:`waflib.TaskGen.task_gen`
		"""
		newobj = self.bld()
		for x in self.__dict__:
			if x in ['env', 'bld']:
				continue
			elif x in ['path', 'features']:
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))

		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()
		return newobj
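# Standalone sketch of the method-ordering pass inside post() above: a
# Kahn-style topological sort over the precedence table, where prec_tbl[k]
# lists the methods that must run before method k. The _sort_methods helper
# and the sample table are illustrative assumptions, not waf API.
def _sort_methods(keys, prec_tbl):
	prec = {k: list(v) for k, v in prec_tbl.items() if k in keys}
	# start from methods that nothing is waiting on (the "last" methods)
	tmp = [a for a in keys if not any(a in v for v in prec.values())]
	out = []
	while tmp:
		e = tmp.pop()
		if e in keys:
			out.append(e)
		nlst = prec.pop(e, [])
		for x in nlst:
			# release a method once no remaining entry depends on it
			if not any(x in v for v in prec.values()):
				tmp.append(x)
	if prec:
		raise ValueError('cycle detected: %r' % prec)
	out.reverse()
	return out

# 'b' must run before 'a', and 'c' before 'b'
order = _sort_methods({'a', 'b', 'c'}, {'a': ['b'], 'b': ['c']})
assert order == ['c', 'b', 'a']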
def runnable_status(self): """ Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks executed by the main thread so there are no concurrency issues """ if getattr(self, 'mod_fortran_done', None): return super(fc, self).runnable_status() # now, if we reach this part it is because this fortran task is the first in the list bld = self.generator.bld # obtain the fortran tasks lst = get_fortran_tasks(self) # disable this method for other tasks for tsk in lst: tsk.mod_fortran_done = True # wait for all the .f tasks to be ready for execution # and ensure that the scanners are called at least once for tsk in lst: ret = tsk.runnable_status() if ret == Task.ASK_LATER: # we have to wait for one of the other fortran tasks to be ready # this may deadlock if there are dependencies between fortran tasks # but this should not happen (we are setting them here!) for x in lst: x.mod_fortran_done = None return Task.ASK_LATER ins = Utils.defaultdict(set) outs = Utils.defaultdict(set) # the .mod files to create for tsk in lst: key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith('MOD@'): name = bld.modfile(x.replace('MOD@', '')) node = bld.srcnode.find_or_declare(name) tsk.set_outputs(node) outs[node].add(tsk) # the .mod files to use for tsk in lst: key = tsk.uid() for x in bld.raw_deps[key]: if x.startswith('USE@'): name = bld.modfile(x.replace('USE@', '')) node = bld.srcnode.find_resource(name) if node and node not in tsk.outputs: if not node in bld.node_deps[key]: bld.node_deps[key].append(node) ins[node].add(tsk) # if the intersection matches, set the order for k in ins.keys(): for a in ins[k]: a.run_after.update(outs[k]) for x in outs[k]: self.generator.bld.producer.revdeps[x].add(a) # the scanner cannot output nodes, so we have to set them # ourselves as task.dep_nodes (additional input nodes) tmp = [] for t in outs[k]: tmp.extend(t.outputs) a.dep_nodes.extend(tmp) a.dep_nodes.sort(key=lambda x: x.abspath()) # the task objects have changed: clear the signature cache for tsk in lst: try: delattr(tsk, 'cache_sig') except AttributeError: pass return super(fc, self).runnable_status()
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.Task` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""

	mappings = Utils.ordered_iter_dict()
	"""Mappings are global file extension mappings that are retrieved in the order of definition"""

	prec = Utils.defaultdict(set)
	"""Dict that holds the precedence execution rules for task generator methods"""

	def __init__(self, *k, **kw):
		"""
		Task generator objects predefine various attributes (source, target) for possible
		processing by process_rule (make-like rules) or process_source (extensions, misc methods)

		Tasks are stored on the attribute 'tasks'. They are created by calling methods
		listed in ``self.meths`` or referenced in the attribute ``features``
		A topological sort is performed to execute the methods in correct order.

		The extra key/value elements passed in ``kw`` are set as attributes
		"""
		self.source = []
		self.target = ''

		self.meths = []
		"""
		List of method names to execute (internal)
		"""

		self.features = []
		"""
		List of feature names for bringing new methods in
		"""

		self.tasks = []
		"""
		Tasks created are added to this list
		"""

		if not 'bld' in kw:
			# task generators without a build context :-/
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = kw.get('path', self.bld.path) # by default, emulate chdir when reading scripts

			# Provide a unique index per folder
			# This is part of a measure to prevent output file name collisions
			path = self.path.abspath()
			try:
				self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
			except AttributeError:
				self.bld.idx = {}
				self.idx = self.bld.idx[path] = 1

			# Record the global task generator count
			try:
				self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
			except AttributeError:
				self.tg_idx_count = self.bld.tg_idx_count = 1

		for key, val in kw.items():
			setattr(self, key, val)

	def __str__(self):
		"""Debugging helper"""
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

	def __repr__(self):
		"""Debugging helper"""
		lst = []
		for x in self.__dict__:
			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
				lst.append("%s=%s" % (x, repr(getattr(self, x))))
		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

	def get_cwd(self):
		"""
		Current working directory for the task generator, defaults to the build directory.
		This is still used in a few places but it should disappear at some point as the classes
		define their own working directory.

		:rtype: :py:class:`waflib.Node.Node`
		"""
		return self.bld.bldnode

	def get_name(self):
		"""
		If the attribute ``name`` is not set on the instance,
		the name is computed from the target name::

			def build(bld):
				x = bld(name='foo')
				x.get_name() # foo
				y = bld(target='bar')
				y.get_name() # bar

		:rtype: string
		:return: name of this task generator
		"""
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name

	def set_name(self, name):
		self._name = name

	name = property(get_name, set_name)

	def to_list(self, val):
		"""
		Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`

		:type val: string or list of string
		:param val: input to return as a list
		:rtype: list
		"""
		if isinstance(val, str):
			return val.split()
		else:
			return val

	def post(self):
		"""
		Creates tasks for this task generator. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or :py:attr:`waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
		"""
		if getattr(self, 'posted', None):
			return False
		self.posted = True

		keys = set(self.meths)
		keys.update(feats['*'])

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features:
			st = feats[x]
			if st:
				keys.update(st)
			elif not x in Task.classes:
				Logs.warn('feature %r does not exist - bind at least one method to it?', x)

		# copy the precedence table
		prec = {}
		prec_tbl = self.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x:
					break
			else:
				tmp.append(a)

		tmp.sort(reverse=True)

		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys:
				out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
						tmp.sort(reverse=True)

		if prec:
			buf = ['Cycle detected in the method execution:']
			for k, v in prec.items():
				buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
			raise Errors.WafError('\n'.join(buf))

		self.meths = out

		# then we run the methods in order
		Logs.debug('task_gen: posting %s %d', self, id(self))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)', x, id(self))
			v()

		Logs.debug('task_gen: posted %s', self.name)
		return True

	def get_hook(self, node):
		"""
		Returns the ``@extension`` method to call for a Node of a particular extension.

		:param node: Input file to process
		:type node: :py:class:`waflib.Node.Node`
		:return: A method able to process the input node by looking at the extension
		:rtype: function
		"""
		name = node.name
		for k in self.mappings:
			try:
				if name.endswith(k):
					return self.mappings[k]
			except TypeError:
				# regexps objects
				if k.match(name):
					return self.mappings[k]
		keys = list(self.mappings.keys())
		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))

	def create_task(self, name, src=None, tgt=None, **kw):
		"""
		Creates task instances.

		:param name: task class name
		:type name: string
		:param src: input nodes
		:type src: list of :py:class:`waflib.Node.Node`
		:param tgt: output nodes
		:type tgt: list of :py:class:`waflib.Node.Node`
		:return: A task object
		:rtype: :py:class:`waflib.Task.Task`
		"""
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		task.__dict__.update(kw)
		self.tasks.append(task)
		return task

	def clone(self, env):
		"""
		Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that it
		does not create the same output files as the original, or the same files may
		be compiled several times.

		:param env: A configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:return: A copy
		:rtype: :py:class:`waflib.TaskGen.task_gen`
		"""
		newobj = self.bld()
		for x in self.__dict__:
			if x in ('env', 'bld'):
				continue
			elif x in ('path', 'features'):
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))

		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()

		return newobj
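# Example of how the class above is typically driven from a wscript: a feature
# method is registered with @feature, post() schedules it, and it calls
# create_task(). This is a minimal sketch; the 'checksum' feature name, the
# 'checksum_files' attribute and the md5sum task class are assumptions made up
# for illustration, only @feature/create_task/change_ext mirror the real API.
from waflib import Task
from waflib.TaskGen import feature

class md5sum(Task.Task):
	# command template expanded by the Task metaclass
	run_str = 'md5sum ${SRC} > ${TGT}'

@feature('checksum')
def create_checksum_tasks(self):
	# one md5sum task per declared file, output placed next to the input
	for name in self.to_list(getattr(self, 'checksum_files', [])):
		src = self.path.find_resource(name)
		self.create_task('md5sum', src, src.change_ext('.md5'))

def build(bld):
	bld(features='checksum', checksum_files='data.txt')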
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file

import os, re
from waflib import Task, Utils, Node, Errors
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
from waflib.Configure import conf

SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']

USELIB_VARS = Utils.defaultdict(set)
USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])

USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH'])
USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])

USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS'])

USELIB_VARS['asm'] = set(['ASFLAGS'])

@taskgen_method
def create_compiled_task(self, name, node):
	# create the compilation task (c, cxx, asm, ...); the output object file
	# embeds the task generator index (self.idx) to avoid name collisions
	# between task generators declared in the same path
	out = '%s.%d.o' % (node.name, self.idx)
	task = self.create_task(name, node, node.parent.find_or_declare(out))
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		self.compiled_tasks = [task]
	return task
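# The USELIB_VARS table above maps feature names to the ConfigSet variables
# that feed the generated command lines. A short sketch of what that means in
# practice; the ZLIB uselib name and the flag values are example assumptions,
# not part of this excerpt:
def configure(conf):
	conf.load('compiler_c')
	conf.env.DEFINES_ZLIB = ['USE_ZLIB=1']
	conf.env.LIB_ZLIB = ['z']

def build(bld):
	# 'c cprogram' selects the 'c' and 'cprogram' USELIB_VARS entries, so
	# DEFINES/CFLAGS reach the compile step and LIB/LINKFLAGS the link step;
	# use='ZLIB' merges the *_ZLIB variables into this task generator's env
	bld(features='c cprogram', source='main.c', target='app', use='ZLIB')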
CONTINUE = 'continue'
"""In case of a configuration error, continue"""

WAF_CONFIG_LOG = 'config.log'
"""Name of the configuration log file"""

autoconfig = False
"""Execute the configuration automatically"""

conf_template = '''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#'''

pre_conf_method_events = Utils.defaultdict(list)
"""List of events per conf function that will be fired before the conf call"""

post_conf_method_events = Utils.defaultdict(list)
"""List of events per conf function that will be fired after the conf call"""

conf_event_not_before_restrictions = Utils.defaultdict(set)
"""Table that tracks the ordering rules for order-restricted after-conf-method events"""

DEPRECATED_FUNCTIONS = set()

def deprecated(reason=None):
	"""
	Decorator to tag functions as deprecated. Deprecated functions are
	reported in the debug log.

	:param reason: Optional reason/message to display in the debug log
	"""
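# A minimal sketch of how event tables like the above are typically consumed;
# the register_pre_conf_event and fire_pre_conf_events helpers are assumptions
# made up for illustration and are not shown in this excerpt:
def register_pre_conf_event(method_name, handler):
	# handlers are keyed by the conf method name and run in registration order
	pre_conf_method_events[method_name].append(handler)

def fire_pre_conf_events(conf, method_name):
	for handler in pre_conf_method_events[method_name]:
		handler(conf)

# usage: run a handler before every 'check_cc' conf method call
register_pre_conf_event('check_cc', lambda conf: conf.to_log('about to run check_cc'))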