def find_dir(self, lst):
    """
    Searches for a folder on the filesystem (see ``find_node``).

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object, or None if no such folder exists
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.find_node(lst)
    if not node:
        return node
    return node if node.isdir() else None
def find_node(self, lst):
    """
    Finds a node on the file system (files or folders), creating the
    corresponding Node objects if the entries exist.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object, or None if nothing was found on disk
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            known = node.children
        except AttributeError:
            # first child below this node: create the children mapping
            node.children = self.dict_class()
        else:
            if name in known:
                node = known[name]
                continue
        # optimistic creation: build the node, then verify it on disk
        node = self.__class__(name, node)
        if not node.exists():
            node.evict()
            return None
    if not node.exists():
        node.evict()
        return None
    return node
def make_node(self, lst):
    """
    Returns or creates a Node object corresponding to the input path,
    without considering the filesystem.

    :param lst: relative path
    :type lst: string or list of string
    :rtype: :py:class:`waflib.Node.Node`
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except AttributeError:
            # no children mapping yet: create it, then fall through and create the child
            node.children = self.dict_class()
        except KeyError:
            # child not recorded yet: fall through and create it
            pass
        else:
            continue
        node = self.__class__(name, node)
    return node
def find_node(self, lst):
    """
    Finds a node on the file system (files or folders), creating the
    corresponding Node objects if the entries exist.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object, or None if nothing was found on disk
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            known = node.children
        except AttributeError:
            # first child below this node: create the children mapping
            node.children = self.dict_class()
        else:
            if name in known:
                node = known[name]
                continue
        # optimistic creation: build the node, then verify it on disk
        node = self.__class__(name, node)
        if not node.exists():
            node.evict()
            return None
    if not node.exists():
        node.evict()
        return None
    return node
def find_or_declare(self, lst):
    """
    Use this method in the build phase to declare output files.

    If 'self' is in the build directory, an already declared node is
    returned when one exists. Otherwise the source directory is searched,
    and if still nothing is found, a new node is created in the build
    directory along with the intermediate folders.

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst)
    if node:
        if not os.path.isfile(node.abspath()):
            # declared but missing on disk: make sure the folder exists
            node.parent.mkdir()
        return node
    src = self.get_src()
    node = src.find_node(lst)
    if node:
        return node
    node = src.get_bld().make_node(lst)
    node.parent.mkdir()
    return node
def to_src_nodes(lst):
    """Find file nodes only in src; TaskGen.to_nodes cannot be used here
    since it gives preference to nodes in build.

    Accepts a Node or a relative path (string or list); only plain files
    located in the source tree are returned, anything else raises WafError.
    """
    if isinstance(lst, Node.Node):
        if not lst.is_src():
            raise Errors.WafError('buildcopy: node %s is not in src' % lst)
        if not os.path.isfile(lst.abspath()):
            raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)' % lst)
        return lst
    if isinstance(lst, str):
        lst = [x for x in Utils.split_path(lst) if x and x != '.']
    # declared nodes first, then the filesystem, both below src only
    node = self.bld.path.get_src().search_node(lst)
    if not node:
        node = self.bld.path.get_src().find_node(lst)
    if not node:
        raise Errors.WafError('buildcopy: File not found in src: %s' % os.path.join(*lst))
    if not os.path.isfile(node.abspath()):
        raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)' % node)
    return node
def find_node(self, lst):
    """
    Finds a node on the file system (files or folders), creating the
    corresponding Node objects if the entries exist. Paths beginning with
    ``\\\\`` looked up from the filesystem root are anchored as UNC shares.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object, or None if nothing was found on disk
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    if lst and lst[0].startswith('\\\\') and not self.parent:
        # UNC share looked up from the root node
        share = self.ctx.root.make_node(lst[0])
        share.cache_isdir = True
        return share.find_node(lst[1:])
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            known = node.children
        except AttributeError:
            node.children = self.dict_class()
        else:
            if name in known:
                node = known[name]
                continue
        # optimistic creation: build the node, then verify it on disk
        node = self.__class__(name, node)
        if not node.exists():
            node.evict()
            return None
    if not node.exists():
        node.evict()
        return None
    return node
def find_dir(self, lst):
    """
    Searches for a folder on the filesystem (see ``find_node``).

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object, or None if no such folder exists
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.find_node(lst)
    if not node:
        return node
    return node if node.isdir() else None
def find_or_declare_win32(self, lst):
    """
    Variant of ``find_or_declare`` relying on the cached file check
    ``isfile_cached`` instead of ``os.path.isfile``; assumes it is called
    before the build starts, so the removed filesystem checks are safe.

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst)
    if node:
        if not node.isfile_cached():
            try:
                node.parent.mkdir()
            except OSError:
                pass
        return node
    src = self.get_src()
    node = src.find_node(lst)
    if node:
        if not node.isfile_cached():
            try:
                node.parent.mkdir()
            except OSError:
                pass
        return node
    node = src.get_bld().make_node(lst)
    node.parent.mkdir()
    return node
def to_src_nodes(lst):
    """Resolve *lst* (Node, string or path list) to a file node located in
    the source tree; directories and build-only nodes raise WafError."""
    if isinstance(lst, Node.Node):
        if not lst.is_src():
            raise Errors.WafError('buildcopy: node %s is not in src' % lst)
        if not os.path.isfile(lst.abspath()):
            raise Errors.WafError(
                'buildcopy: Cannot copy directory %s (unsupported action)' % lst)
        return lst
    if isinstance(lst, str):
        lst = [x for x in Utils.split_path(lst) if x and x != '.']
    # declared nodes first, then the filesystem, both below src only
    node = self.bld.path.get_src().search_node(lst)
    if not node:
        node = self.bld.path.get_src().find_node(lst)
    if not node:
        raise Errors.WafError('buildcopy: File not found in src: %s' % os.path.join(*lst))
    if not os.path.isfile(node.abspath()):
        raise Errors.WafError(
            'buildcopy: Cannot copy directory %s (unsupported action)' % node)
    return node
def to_nodes(lst):
    """Find file nodes only in src; TaskGen.to_nodes will not work for this
    since it gives preference to nodes in build.
    """
    if isinstance(lst, Node.Node):
        if not lst.is_src():
            raise Errors.WafError('buildcopy: node %s is not in src' % lst)
        if not os.path.isfile(lst.abspath()):
            raise Errors.WafError(
                'buildcopy: Cannot copy directory %s (unsupported action)' % lst)
        return lst
    if isinstance(lst, str):
        lst = [x for x in Utils.split_path(lst) if x and x != '.']
    # declared nodes first, then the filesystem, both below src only
    node = self.bld.path.get_src().search_node(lst)
    if not node:
        node = self.bld.path.get_src().find_node(lst)
    if not node:
        raise Errors.WafError('buildcopy: File not found in src: %s' % os.path.join(*lst))
    if not os.path.isfile(node.abspath()):
        raise Errors.WafError(
            'buildcopy: Cannot copy directory %s (unsupported action)' % node)
    return node
def post_run(self):
    """
    Resolves the header paths recorded in ``self.msvcdeps_paths`` into
    nodes and stores them as dependencies of this task.
    """
    if self.env.CC_NAME not in supported_compilers:
        return super(self.derived_msvcdeps, self).post_run()
    # TODO this is unlikely to work with netcache
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)
    bld = self.generator.bld
    unresolved_names = []  # always empty; recorded as raw_deps below
    resolved_nodes = []
    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}
    for path in self.msvcdeps_paths:
        node = None
        if os.path.isabs(path):
            node = path_to_node(bld.root, path, cached_nodes)
        else:
            # when calling find_resource, make sure the path does not begin with '..'
            base_node = bld.bldnode
            path = [k for k in Utils.split_path(path) if k and k != '.']
            # FIX: guard on the list itself so an all-'..' path cannot raise IndexError
            while path and path[0] == '..':
                path.pop(0)
                base_node = base_node.parent
            path = os.sep.join(path)
            node = path_to_node(base_node, path, cached_nodes)
        if not node:
            raise ValueError('could not find %r for %r' % (path, self))
        if not c_preproc.go_absolute:
            if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                # System library
                Logs.debug('msvcdeps: Ignoring system include %r', node)
                continue
        if id(node) == id(self.inputs[0]):
            # Self-dependency
            continue
        resolved_nodes.append(node)
    bld.node_deps[self.uid()] = resolved_nodes
    bld.raw_deps[self.uid()] = unresolved_names
    try:
        del self.cache_sig
    except AttributeError:
        pass
    Task.Task.post_run(self)
def find_resource(self, lst):
    """
    Looks up a source file: first among the nodes declared in the build
    directory, then on the filesystem below the source directory.
    Directories are rejected.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object or None
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst) or self.get_src().find_node(lst)
    if node and node.isdir():
        return None
    return node
def find_resource(self, lst):
    """
    Looks up a source file: first among the nodes declared in the build
    directory, then on the filesystem below the source directory.
    Directories are rejected.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object or None
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst) or self.get_src().find_node(lst)
    if node and node.isdir():
        return None
    return node
def find_or_declare(self, lst):
    """
    Declares an output file in the build directory, creating intermediate
    folders as needed. Only the build directory is considered; the source
    directory is not consulted.

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    bld = self.get_bld()
    node = bld.search_node(lst)
    if node:
        if not os.path.isfile(node.abspath()):
            # declared but missing on disk: make sure the folder exists
            node.parent.mkdir()
        return node
    node = bld.make_node(lst)
    node.parent.mkdir()
    return node
def search_node(self, lst):
    """
    Returns a Node previously declared in the data structure.
    The filesystem is not consulted.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the Node, or None if there is no entry in the Node datastructure
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except (AttributeError, KeyError):
            # no children mapping at all, or no child with that name
            return None
    return node
def search_node(self, lst):
    """
    Returns a Node previously declared in the data structure.
    The filesystem is not consulted.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the Node, or None if there is no entry in the Node datastructure
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except (AttributeError, KeyError):
            # no children mapping at all, or no child with that name
            return None
    return node
def find_or_declare(self, lst):
    """
    Use this method in the build phase to declare output files.

    An already declared build node is returned when one exists; otherwise
    the source directory is searched, and as a last resort a new node is
    created in the build directory (intermediate folders included).

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst)
    if node:
        if not os.path.isfile(node.abspath()):
            # declared but missing on disk: make sure the folder exists
            node.parent.mkdir()
        return node
    src = self.get_src()
    node = src.find_node(lst)
    if node:
        return node
    node = src.get_bld().make_node(lst)
    node.parent.mkdir()
    return node
def run(self):
    """
    Writes the generated wscript file: a header rendered from
    ``template_wscript_header``, followed by one sdk/extlib section per
    (task generator, node) pair in ``self.generators``.
    """
    output_node = self.outputs[0]
    output_node.parent.mkdir()
    with open(output_node.abspath(), 'wt') as fh:
        # render and normalize the header section
        extra_configure = getattr(self.generator, 'extra_configure', '').splitlines()
        indent = ' ' * 4
        dct = {'EXTRA_CONFIGURE': ('\n' + indent).join(x.lstrip() for x in extra_configure)}
        wscript_header = template_wscript_header.substitute(dct)
        fh.write(''.join((x.rstrip() + '\n') for x in wscript_header.splitlines()))
        separator = ''
        remove_prefix = self.install_dirs[output_node] + '/'
        for tgen, node in self.generators:
            dct = {
                'NAME': tgen.name,
                'DESCRIPTION': tgen.description,
                'DISPLAYGROUP': tgen.displaygroup,
                'VERSION': tgen.version,
                'INCLUDE_PATHS': "'%s'" % self.generator.include_folder_name,
            }
            tmpl = template_wscript_sdk if getattr(tgen, 'SDK', False) else template_wscript_extlib
            dct['TARGET'] = Utils.split_path(tgen.target)[-1]
            dct['PATH'] = self.install_dirs[node][len(remove_prefix):]
            # If a component defines public_defines we will use them,
            # otherwise fall back to export defines
            export_defines = Utils.to_list(getattr(tgen, 'public_defines', [])) + \
                Utils.to_list(getattr(tgen, 'export_defines', []))
            uselib = Utils.to_list(getattr(tgen, 'uselib', []))
            indent = ' ' * 28
            dct['EXPORT_DEFINES'] = (',\n' + indent).join("'%s'" % x for x in export_defines)
            dct['USELIB'] = (',\n' + indent).join("'%s'" % x for x in uselib)
            fh.write(separator + tmpl.substitute(dct))
            separator = '\n'
def find_dir(self, lst):
    """
    Searches for a folder on the filesystem (see
    :py:meth:`waflib.Node.Node.find_node`).

    :param lst: relative path
    :type lst: string or list of string
    :returns: The corresponding Node object or None if there is no such folder
    :rtype: :py:class:`waflib.Node.Node`
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.find_node(lst)
    if not node:
        return node
    return node if node.isdir() else None
def find_node(self, lst):
    """
    Finds a node on the file system (files or folders), creating the
    corresponding Node objects if the entries exist.

    :param lst: relative path
    :type lst: string or list of string
    :returns: The corresponding Node object or None if no entry was found on the filesystem
    :rtype: :py:class:`waflib.Node.Node`
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    if lst and lst[0].startswith('\\\\') and not self.parent:
        # UNC share looked up from the root node
        share = self.ctx.root.make_node(lst[0])
        share.cache_isdir = True
        return share.find_node(lst[1:])
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            known = node.children
        except AttributeError:
            node.children = self.dict_class()
        else:
            if name in known:
                node = known[name]
                continue
        # optimistic: create the node first then look if it was correct to do so
        node = self.__class__(name, node)
        if not node.exists():
            node.evict()
            return None
    if not node.exists():
        node.evict()
        return None
    return node
def make_node(self, lst):
    """
    Returns or creates a Node object corresponding to the input path,
    without considering the filesystem.

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except AttributeError:
            # no children mapping yet: create it, then fall through and create the child
            node.children = self.dict_class()
        except KeyError:
            # child not recorded yet: fall through and create it
            pass
        else:
            continue
        node = self.__class__(name, node)
    return node
def find_or_declare(self, lst):
    """
    win32 wrapper for ``find_or_declare`` adding support for UNC paths
    (components starting with two backslashes); all other paths are
    delegated to the original implementation (``find_or_declare_orig``).

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [x for x in Utils.split_path(lst) if x and x != '.']
    # FIX: guard against an empty path list before peeking at lst[0]
    if lst and lst[0].startswith('\\\\'):
        if len(lst) < 3:
            return None
        # anchor \\server\share below the filesystem root
        node = self.ctx.root.make_node(lst[0]).make_node(lst[1])
        node.cache_isdir = True
        node.parent.cache_isdir = True
        ret = node.find_node(lst[2:])
        if not ret:
            ret = node.make_node(lst[2:])
        if not os.path.isfile(ret.abspath()):
            ret.parent.mkdir()
        return ret
    return self.find_or_declare_orig(lst)
def make_node(self, lst):
    """
    Returns or creates a Node object corresponding to the input path,
    without considering the filesystem.

    :param lst: relative path
    :type lst: string or list of string
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except AttributeError:
            # no children mapping yet: create it, then fall through and create the child
            node.children = self.dict_class()
        except KeyError:
            # child not recorded yet: fall through and create it
            pass
        else:
            continue
        node = self.__class__(name, node)
    return node
def get_node_from_dependency_path(self, path):
    """
    Maps a dependency path reported by the compiler to a waf Node.

    Ported from msvcdeps.py: absolute paths get their drive letter
    normalized to the conventions of the main source tree (Windows only);
    relative paths are resolved against the build node.

    :param path: dependency path as emitted by the compiler
    :returns: the corresponding Node, or None if it cannot be resolved
    """
    # collect headers and add them to deps
    try:
        cached_nodes = self.bld.cached_nodes
    except AttributeError:
        # FIX: narrowed from a bare except -- only a missing attribute is expected here
        cached_nodes = self.bld.cached_nodes = {}
    bld = self.generator.bld
    # NOTE(review): cached_nodes is taken from self.bld while the lookups below
    # use self.generator.bld -- confirm both refer to the same build context
    lowercase = False
    if Utils.is_win32:
        (drive, _) = os.path.splitdrive(bld.srcnode.abspath())
        lowercase = drive == drive.lower()
    correct_case_path = bld.path.abspath()
    correct_case_path_len = len(correct_case_path)
    correct_case_path_norm = os.path.normcase(correct_case_path)
    if os.path.isabs(path):
        if Utils.is_win32:
            # Force drive letter to match conventions of main source tree
            drive, tail = os.path.splitdrive(path)
            if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
                # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
                path = correct_case_path + path[correct_case_path_len:]
            else:
                # Check the drive letter
                if lowercase and (drive != drive.lower()):
                    path = drive.lower() + tail
                elif (not lowercase) and (drive != drive.upper()):
                    path = drive.upper() + tail
        return path_to_node(bld.root, path, cached_nodes)
    else:
        base_node = bld.bldnode
        # when calling find_resource, make sure the path does not begin by '..'
        path = [k for k in Utils.split_path(path) if k and k != '.']
        # FIX: guard on the list so a path consisting only of '..' cannot raise IndexError
        while path and path[0] == '..':
            path = path[1:]
            base_node = base_node.parent
        return path_to_node(base_node, path, cached_nodes)
def search_node(self, lst):
    """
    Returns a Node previously defined in the data structure.
    The filesystem is not considered.

    :param lst: relative path
    :type lst: string or list of string
    :rtype: :py:class:`waflib.Node.Node` or None if there is no entry in the Node datastructure
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self
    for name in lst:
        if name == '..':
            node = node.parent or node
            continue
        try:
            node = node.children[name]
        except (AttributeError, KeyError):
            # no children mapping at all, or no child with that name
            return None
    return node
def find_resource(self, lst):
    """
    Use this method in the build phase to find source files corresponding
    to the relative path given: first the declared nodes in the build
    directory are searched, then the filesystem below the source directory.
    Directories are rejected.

    :param lst: relative path
    :type lst: string or list of string
    :returns: the corresponding Node object or None
    :rtype: :py:class:`waflib.Node.Node`
    """
    if isinstance(lst, str):
        lst = [part for part in Utils.split_path(lst) if part and part != '.']
    node = self.get_bld().search_node(lst) or self.get_src().find_node(lst)
    if node and node.isdir():
        return None
    return node
def build_libsused(self):
    """
    Collects the libraries used by this task generator (or globally, when
    no 'use' list is given) and creates a 'LibsusedTask' writing the
    *_usedlibs.txt report; optionally installs the report.

    NOTE(review): this block was recovered from whitespace-mangled source;
    the nesting of the 'tg.post()' section relative to the nxf/nxo branches
    was reconstructed and should be confirmed against version history.
    """
    bld = self.bld
    inputs = []
    input_tgens = []

    def libsused_get_generators_recursive(generators, tg):
        # walk the transitive 'use' closure of tg, accumulating generators
        stack = set(Utils.to_list(getattr(tg, 'use', [])))
        while stack:
            x = bld.get_tgen_by_name(stack.pop())
            if x not in generators:
                generators.add(x)
                stack.update(set(Utils.to_list(getattr(x, 'use', []))))

    generators = set()
    referenced_node = None
    use = self.to_list(getattr(self, 'use', []))
    if not use:
        # Generate global libs_used file
        output = self.path.find_or_declare(self.target)
        install_name = output.name
        for g in self.bld.groups:
            for tg in g:
                features = self.to_list(getattr(tg, 'features', ''))
                # Only add version information of libraries referenced in programs, or other libraries
                if 'cprogram' in features or 'cstlib' in features:
                    libsused_get_generators_recursive(generators, tg)
                if 'cstlib' in features:
                    generators.add(tg)
    elif 'cprogram' in self.features:
        # The 'libsused' feature was enabled on this component
        output = self.path.find_or_declare(self.target + '_usedlibs.txt')
        install_name = output.name
        libsused_get_generators_recursive(generators, self)
    else:
        if 'nxf' in self.features:
            output = self.path.find_or_declare(self.target + '_usedlibs.txt')
            install_name = output.name
            tg = bld.get_tgen_by_name(use[0])
            referenced_node = self.nxf_task.outputs[0]
        elif 'nxo' in self.features:
            output = self.path.find_or_declare(self.target + '_usedlibs.txt')
            install_name = output.name
            tg = self
            referenced_node = self.nxobuilder_task.outputs[0]
        else:
            # The component being investigated is passed in the 'use' list;
            # this is triggered by a call to bld.generate_libsused().
            # Generate a unique name for the generated libsused file.
            unique_name = "_generated_" + ("_".join(Utils.split_path(self.name) + Utils.split_path(self.target)))
            output = self.path.find_or_declare(unique_name)
            install_name = Utils.split_path(self.target)[-1]
            tg = bld.get_tgen_by_name(use[0])
            if not tg.posted:
                tg.post()
            link_task = getattr(tg, 'link_task', None)
            if link_task:
                referenced_node = link_task.outputs[0]
        for x in use[1:]:
            libsused_get_generators_recursive(generators, bld.get_tgen_by_name(x))
        libsused_get_generators_recursive(generators, tg)
    for tg in generators:
        if 'cprogram' in tg.features:
            # never include an already linked elf in usedlibs.txt
            hide_from_usedlibs = True
        elif getattr(tg, 'hidden_from_list', False):
            if 'cstlib' in tg.features or 'cxxstlib' in tg.features:
                hide_from_usedlibs = True
            else:
                # Do hide external components and sdk's (they're marked with 'hidden_from_list' too)
                hide_from_usedlibs = False
        else:
            hide_from_usedlibs = False
        if not hide_from_usedlibs:
            if not getattr(tg, 'posted', False):
                tg.post()
            link_task = getattr(tg, 'link_task', None)
            if link_task:
                inputs.append(link_task.outputs[0])
                input_tgens.append((tg, link_task.outputs[0]))
            elif getattr(tg, 'SDK', False) and getattr(self, 'include_SDK', False):
                input_tgens.append((tg, None))
    task = self.libsused_task = self.create_task('LibsusedTask', inputs, [output])
    task.input_tgens = input_tgens
    task.referenced_node = referenced_node
    if getattr(self, 'install_path', None):
        self.bld.install_as("/".join(Utils.split_path(self.install_path) + [install_name]), output)
def distribute_generate_distribution(self):
    """
    Installs the artifacts of every 'use'/'use_ltd' target (libraries,
    firmware files, SDKs) into the distribution layout below
    ``self.install_path``, generates a wscript when the 'distribute_lib'
    feature is set, and distributes the requested include files.

    NOTE(review): this block was recovered from whitespace-mangled source;
    nesting was reconstructed and should be confirmed against history.
    """
    bld = self.bld
    install_path = Utils.split_path(self.install_path)
    include_dist_paths = set()
    if 'distribute_lib' in self.features:
        name = 'generated_' + '_'.join(Utils.split_path(self.install_path) + [self.wscript_name])
        wscript_node = self.path.find_or_declare(name)
        tsk = self.wscript_task = self.create_task('distribute_generate_wscript', [], [wscript_node])
        tsk.install_dirs = {}
        tsk.generators = []
        bld.install_as('/'.join(install_path + [self.wscript_name]), wscript_node)
        tsk.install_dirs[wscript_node] = '/'.join(install_path)
        include_dist_paths.add(tuple(install_path[:]))
    use = self.to_list(self.use)
    use_ltd = self.to_list(getattr(self, 'use_ltd', []))
    include_search_paths = []
    for i, x in enumerate(use + use_ltd):
        ltd = i >= len(use)
        if isinstance(x, (tuple, list)):
            name, subdir_path = tuple(x)
        else:
            name, subdir_path = x, None
        tgen = self.bld.get_tgen_by_name(name)
        if not getattr(tgen, 'posted', None):
            tgen.post()
        # build up include search paths
        include_tgen = [tgen]
        for y in Utils.to_list(getattr(tgen, 'use', [])):
            include_tgen.append(self.bld.get_tgen_by_name(y))
        for y in include_tgen:
            include_search_paths.extend(getattr(y, "include_nodes", [])[:])
            for z in getattr(y, "export_includes", []):
                include_search_paths.append(y.path.find_node(z))
        parts = tgen.name.split('/')
        if getattr(tgen, 'SDK', False):
            dist_node = None
            dst_path_parts = None
        elif set(tgen.features) & set(['fake_lib', 'shlib', 'stlib', 'cshlib', 'cstlib']):
            dist_node = tgen.link_task.outputs[0]
            if ltd:
                dst_path_parts = install_path + [self.lib_ltd_folder_name]
            else:
                dst_path_parts = install_path + [self.lib_folder_name]
        else:
            dist_node = tgen.path.find_or_declare(tgen.target)
            if ltd:
                dst_path_parts = install_path + [self.firmware_ltd_folder_name]
            else:
                dst_path_parts = install_path + [self.firmware_folder_name]
        # FIX: SDK targets leave dst_path_parts as None; guard before copying it
        if dst_path_parts is not None:
            include_dist_paths.add(tuple(dst_path_parts[:]))
        if 'fake_lib' in tgen.features:
            # an external library is to be distributed with this distribution,
            # extract the original path
            prefix = ('%s/%s/' % (parts[0], parts[1])).lower()
            orig_path = '/'.join(Utils.split_path(dist_node.path_from(tgen.path)))
            # FIX: use a dedicated name instead of clobbering the loop index 'i'
            idx = orig_path.lower().find(prefix)
            if idx >= 0:
                dst_path_parts.extend(Utils.split_path(prefix + orig_path[idx + len(prefix):]))
            else:
                Logs.warn(u'Unable to extract prefix for %s' % tgen.name)
                dst_path_parts.extend(Utils.split_path(orig_path))
        elif dist_node is not None:
            if subdir_path is not None:
                if subdir_path.endswith('/') or subdir_path.endswith('\\'):
                    # subdir_path is a directory:
                    dst_path_parts.extend(Utils.split_path(subdir_path) + [dist_node.name])
                else:
                    # subdir_path is a file
                    dst_path_parts.extend(Utils.split_path(subdir_path))
                    base, ext = os.path.splitext(dst_path_parts[-1].lower())
                    if ext not in '.nxf .nxo .rom'.split():
                        Logs.warn(u'Missing file extension for distribution %s. (Check for missing trailing slash sub-path?)' % ('/'.join(dst_path_parts)))
                # also install libsused file if available
                libsused_task = getattr(tgen, 'libsused_task', None)
                if libsused_task is not None:
                    bld.install_as('/'.join(dst_path_parts[0:-1] + ['%s_usedlibs.txt' % dst_path_parts[-1]]), libsused_task.outputs[0])
            elif 'distribute_lib' in self.features:
                toolchain, toolchain_version, dummy = bld.get_name_prefix(
                    toolchain=tgen.toolchain, platform=tgen.platform).split('/', 2)
                if len(parts) not in (3, 4):
                    bld.fatal(u'Invalid generator name %s' % tgen.name)
                if parts[0] != toolchain:
                    Logs.warn(u'Unexpected toolchain in generator %s' % tgen.name)
                if parts[1] != toolchain_version:
                    Logs.warn(u'Unexpected toolchain version in generator %s' % tgen.name)
                dst_path_parts.extend([toolchain, toolchain_version])
                if len(parts) == 4:
                    os_label = parts[2]
                    dst_path_parts.append(os_label)
                platform_label = tgen.platform.lower()
                # FIX: ('netx') is a plain string, so the old test did substring
                # matching; a one-element tuple is required for membership
                if platform_label not in ('netx',):
                    dst_path_parts.append(platform_label)
                dst_path_parts.append(dist_node.name)
            else:
                bld.fatal(u'Unable to determine install path for %s' % tgen.name)
        if dist_node is not None:
            bld.install_as('/'.join(dst_path_parts), dist_node)
        if 'distribute_lib' in self.features:
            if dist_node is not None:
                tsk.set_inputs(dist_node)
                tsk.install_dirs[dist_node] = '/'.join(dst_path_parts[:-1])
                tsk.generators.append((tgen, dist_node))
    for x in self.dist_includes:
        if isinstance(x, (tuple, list)):
            src_path, subdir_path = x
        else:
            src_path, subdir_path = x, None
        # FIX: use a dedicated loop variable instead of shadowing 'x'
        for search in include_search_paths:
            include_node = search.find_node(src_path)
            if include_node:
                if subdir_path is None:
                    subdir_path = include_node.parent.path_from(search)
                break
        else:
            bld.fatal(u'Include file %s not found in use/use_ltd targets' % src_path)
        for y in include_dist_paths:
            dst_path_parts = list(y) + [self.include_folder_name] + Utils.split_path(subdir_path)
            bld.install_files('/'.join(dst_path_parts), [include_node])
def post_run(self):
    """
    Parses the .d file produced by the compiler and records the discovered
    dependency nodes in the build context.

    The following code is executed by threads, it is not safe, so a lock is
    taken around the node lookups.
    """
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)
    txt = txt.replace('\\\n', '')
    lst = txt.strip().split(':')
    val = ":".join(lst[1:])
    val = val.split()
    nodes = []
    bld = self.generator.bld
    # NOTE: the unused 'f = re.compile(...)' local was removed
    for x in val:
        node = None
        if os.path.isabs(x):
            if not c_preproc.go_absolute:
                continue
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            path = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # FIX: guard on x (the path components) -- the old 'lst and' guard
            # tested an unrelated, never-empty list, so an all-'..' path raised IndexError
            while x and x[0] == '..':
                x = x[1:]
                path = path.parent
            try:
                lock.acquire()
                node = path.find_resource(x)
            finally:
                lock.release()
        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue
        nodes.append(node)
    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    try:
        del self.cache_sig
    except AttributeError:
        # FIX: narrowed from a bare except
        pass
    Task.Task.post_run(self)
def post_run(self):
    """
    Reads the .d file produced by gcc/clang and registers the listed
    headers as dependencies of this task.
    """
    if self.__class__.__name__ not in self.env.ENABLE_GCCDEPS:
        return super(self.derived_gccdeps, self).post_run()
    deps_filename = re_o.sub('.d', self.outputs[0].abspath())
    try:
        txt = Utils.readf(deps_filename)
    except EnvironmentError:
        Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
        raise
    #os.remove(deps_filename)
    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
    # Now join all the lines together
    txt = txt.replace('\\\n', '')
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(txt.strip()) if x]
    nodes = []
    bld = self.generator.bld
    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}
    for x in val:
        node = None
        if os.path.isabs(x):
            node = path_to_node(bld.root, x, cached_nodes)
        else:
            # TODO waf 1.9 - single cwd value
            path = getattr(bld, 'cwdx', bld.bldnode)
            # when calling find_resource, make sure the path does not contain '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            while '..' in x:
                idx = x.index('..')
                if idx == 0:
                    x = x[1:]
                    path = path.parent
                else:
                    del x[idx]
                    del x[idx - 1]
            node = path_to_node(path, x, cached_nodes)
        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue
        nodes.append(node)
    Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    try:
        del self.cache_sig
    except AttributeError:
        pass
    Task.Task.post_run(self)
def post_run(self):
    """
    msvcdeps variant of post_run: resolves the header paths recorded in
    ``self.msvcdeps_paths`` into nodes (normalizing Windows drive-letter
    case) and stores them as dependencies of this task.
    """
    if self.env.CC_NAME not in supported_compilers:
        return super(self.derived_msvcdeps, self).post_run()
    # TODO this is unlikely to work with netcache
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)
    bld = self.generator.bld
    unresolved_names = []  # always empty; recorded as raw_deps below
    resolved_nodes = []
    lowercase = self.generator.msvcdeps_drive_lowercase
    correct_case_path = bld.path.abspath()
    correct_case_path_len = len(correct_case_path)
    correct_case_path_norm = os.path.normcase(correct_case_path)
    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}
    for path in self.msvcdeps_paths:
        node = None
        if os.path.isabs(path):
            # Force drive letter to match conventions of main source tree
            drive, tail = os.path.splitdrive(path)
            if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
                # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
                path = correct_case_path + path[correct_case_path_len:]
            else:
                # Check the drive letter
                if lowercase and (drive != drive.lower()):
                    path = drive.lower() + tail
                elif (not lowercase) and (drive != drive.upper()):
                    path = drive.upper() + tail
            node = path_to_node(bld.root, path, cached_nodes)
        else:
            base_node = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            path = [k for k in Utils.split_path(path) if k and k != '.']
            # FIX: guard on the list so an all-'..' path cannot raise IndexError
            while path and path[0] == '..':
                path = path[1:]
                base_node = base_node.parent
            node = path_to_node(base_node, path, cached_nodes)
        if not node:
            raise ValueError('could not find %r for %r' % (path, self))
        if not c_preproc.go_absolute:
            if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                # System library
                Logs.debug('msvcdeps: Ignoring system include %r', node)
                continue
        if id(node) == id(self.inputs[0]):
            # Self-dependency
            continue
        resolved_nodes.append(node)
    bld.node_deps[self.uid()] = resolved_nodes
    bld.raw_deps[self.uid()] = unresolved_names
    try:
        del self.cache_sig
    except AttributeError:
        pass
    Task.Task.post_run(self)
def post_run(self):
    """
    Parses the .d file generated during compilation and records the header
    dependencies in the build context.

    The following code is executed by threads, it is not safe, so care is
    taken with the shared node cache.
    """
    if self.env.CC_NAME not in supported_compilers:
        return self.no_gccdeps_post_run()
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)
    # Do not check dependencies for disassembly and preprocessed files as this is already the final output
    bld = self.generator.bld
    if bld.is_option_true('show_preprocessed_file') or bld.is_option_true('show_disassembly'):
        return Task.Task.post_run(self)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.remove(name)
    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule or as many individual rules.  Strip the
    # left-hand side of every line first; what remains are the implicit
    # dependencies of task.outputs[0].
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
    # Now join all the lines together
    txt = txt.replace('\\\n', '')
    val = txt.strip()
    # NOTE: the 'lst = val.split(':')' local was removed; it was only used
    # by the (incorrect) '..' loop guard fixed below
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
    nodes = []
    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}
    for x in val:
        node = None
        # Remove leading and trailing double quotes
        # FIX: guard against a lone '"' token, which would become empty and raise IndexError
        if x and x[0] == '"':
            x = x[1:]
        if x and x[-1] == '"':
            x = x[:-1]
        drive_hack = False
        if os.path.isabs(x):
            # HACK: for reasons unknown, some of the android library includes have a ':' appended to the path
            # causing the following to fail
            if 'android' in self.env['PLATFORM']:
                if x[-1] == ':':
                    x = x[:-1]
            node = path_to_node(bld.root, x, cached_nodes, drive_hack)
        else:
            path = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # FIX: guard on x itself -- the old 'lst and' guard tested an
            # unrelated, never-empty list, so an all-'..' path raised IndexError
            while x and x[0] == '..':
                x = x[1:]
                path = path.parent
            node = path_to_node(path, x, cached_nodes, drive_hack)
        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        if not c_preproc.go_absolute:
            if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                continue
        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue
        if node in self.outputs:
            # Circular dependency
            continue
        nodes.append(node)
    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    try:
        del self.cache_sig
    except AttributeError:
        # FIX: narrowed from a bare except
        pass
    Task.Task.post_run(self)
def post_run(self):
    """Register the headers collected during code generation
    (``self.azcg_headers``) as implicit dependencies of this task.

    Ported from msvcdeps.py: on Windows, absolute paths are normalized so
    drive-letter casing matches the main source tree before node lookup.
    System headers (outside src/bld), self-dependencies and circular
    dependencies are skipped.
    """
    # collect headers and add them to deps
    # this is ported from msvcdeps.py

    # Dynamically bind to the per-build node cache.
    # NOTE(review): reads self.bld here but self.generator.bld below -- confirm both exist
    try:
        cached_nodes = self.bld.cached_nodes
    except AttributeError:  # was a bare 'except:'
        cached_nodes = self.bld.cached_nodes = {}

    bld = self.generator.bld

    lowercase = False
    if Utils.is_win32:
        (drive, _) = os.path.splitdrive(bld.srcnode.abspath())
        lowercase = drive == drive.lower()
    correct_case_path = bld.path.abspath()
    correct_case_path_len = len(correct_case_path)
    correct_case_path_norm = os.path.normcase(correct_case_path)

    dep_node = None
    resolved_nodes = []
    for path in self.azcg_headers:
        if os.path.isabs(path):
            if Utils.is_win32:
                # Force drive letter to match conventions of main source tree
                drive, tail = os.path.splitdrive(path)

                if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
                    # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
                    path = correct_case_path + path[correct_case_path_len:]
                else:
                    # Check the drive letter
                    if lowercase and (drive != drive.lower()):
                        path = drive.lower() + tail
                    elif (not lowercase) and (drive != drive.upper()):
                        path = drive.upper() + tail
            dep_node = path_to_node(bld.root, path, cached_nodes)
        else:
            base_node = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            path = [k for k in Utils.split_path(path) if k and k != '.']
            while path[0] == '..':
                path = path[1:]
                base_node = base_node.parent

            dep_node = path_to_node(base_node, path, cached_nodes)

        if dep_node:
            if not (dep_node.is_child_of(bld.srcnode) or dep_node.is_child_of(bld.bldnode)):
                # System library
                Logs.debug('az_code_gen: Ignoring system include %r' % dep_node)
                continue
            if dep_node in self.inputs:
                # Self-dependency
                continue
            if dep_node in self.outputs:
                # Circular dependency
                continue
            resolved_nodes.append(dep_node)
        else:
            Logs.error('az_code_gen: Unable to find dependency file as node: {}'.format(path))

    bld.node_deps[self.uid()] = resolved_nodes

    # force waf to recompute a full signature for this task (we may have new/deleted dependencies we need it to account for)
    try:
        del self.cache_sig
    except AttributeError:  # was a bare 'except:'
        pass

    self.azcg_set('AZCG_OUTPUTS', self.outputs)

    Task.Task.post_run(self)

    # Due to #includes of code generator header files, we can have an output node which is also an input node.
    # In addition, we are taking nodes that are not originally build nodes (e.g. header files) and building them, which alters the signature flow in Node.get_bld_sig().
    # Task.post_run() default behavior is to set the Node.sig to the task signature which will change our computed task signature because our outputs are our inputs in same cases.
    # To mitigate this, we must restore the original signature for any file that had a non-build signature previously.
    # However, we do not want to alter the signature for files that will be consumed by later tasks.
    # Therefore, we should restore signatures on any node that is not being added to the build (any output nodes not in link_task).
    for output in self.outputs:
        if not output in self.azcg_get('link_inputs', []):
            output.sig = output.cache_sig = Utils.h_file(output.abspath())
def post_run(self): # The following code is executed by threads, it is not safe, so a lock is needed... if self.env.CC_NAME not in ('gcc', 'icc'): return self.no_gccdeps_post_run() if getattr(self, 'cached', None): return Task.Task.post_run(self) name = self.outputs[0].abspath() name = re_o.sub('.d', name) txt = Utils.readf(name) #os.unlink(name) txt = txt.replace('\\\n', '') lst = txt.strip().split(':') val = ":".join(lst[1:]) val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x] nodes = [] bld = self.generator.bld f = re.compile("^(\.\.)[\\/](.*)$") for x in val: node = None if os.path.isabs(x): lock.acquire() try: node = bld.root.find_resource(x) finally: lock.release() else: path = bld.bldnode x = [k for k in Utils.split_path(x) if k and k != '.'] while lst and x[0] == '..': x = x[1:] path = path.parent # when calling find_resource, make sure the path does not begin by '..' try: lock.acquire() node = path.find_resource(x) finally: lock.release() if not node: raise ValueError('could not find %r for %r' % (x, self)) else: if not c_preproc.go_absolute: if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)): continue if id(node) == id(self.inputs[0]): # ignore the source file, it is already in the dependencies # this way, successful config tests may be retrieved from the cache continue nodes.append(node) Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes))) bld.node_deps[self.uid()] = nodes bld.raw_deps[self.uid()] = [] try: del self.cache_sig except: pass Task.Task.post_run(self)
def post_run(self):
    """Parse the ``.d`` dependency file emitted by a supported compiler and
    register the listed headers as implicit dependencies of this task.

    Node lookups are serialized with ``lock`` because post_run executes on
    worker threads and node creation is not thread-safe.

    :raises ValueError: if a relative dependency path cannot be resolved
    """
    # The following code is executed by threads, it is not safe, so a lock is needed...
    if self.env.CC_NAME not in supported_compilers:
        return self.no_gccdeps_post_run()
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    # The .d file sits next to the object file (re_o rewrites the suffix)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)

    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h \
    #                    /path/to/dep3.h \
    #                    ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h
    #   /path/to/file.o: /path/to/dep3.h
    #   ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])

    # Now join all the lines together
    txt = txt.replace('\\\n', '')

    val = txt.strip()
    lst = val.split(':')
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

    nodes = []
    bld = self.generator.bld

    for x in val:
        node = None
        if os.path.isabs(x):
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            path = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # NOTE(review): the 'lst' guard looks like it was meant to be 'x' -- confirm
            while lst and x[0] == '..':
                x = x[1:]
                path = path.parent
            # acquire before entering the try so a failed acquire does not
            # release an unheld lock (previously acquire was inside the try)
            lock.acquire()
            try:
                node = path.find_resource(x)
            finally:
                lock.release()

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            if not c_preproc.go_absolute:
                if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                    # system header outside of src/bld: not tracked
                    continue
            if id(node) == id(self.inputs[0]):
                # ignore the source file, it is already in the dependencies
                # this way, successful config tests may be retrieved from the cache
                continue
            nodes.append(node)

    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []

    # Drop the cached task signature so it is recomputed with the new deps.
    try:
        del self.cache_sig
    except AttributeError:  # was a bare 'except:'
        pass
    Task.Task.post_run(self)
def post_run(self):
    """Register the header paths recorded in ``self.msvcdeps_paths``
    (gathered from the compiler's include output) as implicit dependencies.

    On Windows, absolute paths are normalized so the drive-letter casing
    matches the main source tree before node resolution. System headers,
    and the source file itself, are skipped.

    :raises ValueError: if a dependency path cannot be resolved to a node
    """
    if self.env.CC_NAME not in supported_compilers:
        return super(derived_class, self).post_run()
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    bld = self.generator.bld
    unresolved_names = []
    resolved_nodes = []

    lowercase = self.generator.msvcdeps_drive_lowercase
    correct_case_path = bld.path.abspath()
    correct_case_path_len = len(correct_case_path)
    correct_case_path_norm = os.path.normcase(correct_case_path)

    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}

    for path in self.msvcdeps_paths:
        node = None
        if os.path.isabs(path):
            # Force drive letter to match conventions of main source tree
            drive, tail = os.path.splitdrive(path)

            if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
                # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
                path = correct_case_path + path[correct_case_path_len:]
            else:
                # Check the drive letter
                if lowercase and (drive != drive.lower()):
                    path = drive.lower() + tail
                elif (not lowercase) and (drive != drive.upper()):
                    path = drive.upper() + tail
            node = path_to_node(bld.root, path, cached_nodes)
        else:
            base_node = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            path = [k for k in Utils.split_path(path) if k and k != '.']
            while path[0] == '..':
                path = path[1:]
                base_node = base_node.parent

            node = path_to_node(base_node, path, cached_nodes)

        if not node:
            raise ValueError('could not find %r for %r' % (path, self))
        else:
            if not c_preproc.go_absolute:
                if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                    # System library
                    Logs.debug('msvcdeps: Ignoring system include %r' % node)
                    continue
            if id(node) == id(self.inputs[0]):
                # Self-dependency
                continue
            resolved_nodes.append(node)

    bld.node_deps[self.uid()] = resolved_nodes
    bld.raw_deps[self.uid()] = unresolved_names

    # Free memory (200KB for each file in CryEngine, without UberFiles, this accumulates to 1 GB)
    del self.msvcdeps_paths

    # Drop the cached task signature so it is recomputed with the new deps.
    try:
        del self.cache_sig
    except AttributeError:  # was a bare 'except:'
        pass
    Task.Task.post_run(self)
def post_run(self):
    """Parse the ``.d`` dependency file emitted by a supported compiler and
    register the listed headers as implicit dependencies of this task.

    Node lookups are serialized with ``lock`` because post_run executes on
    worker threads and node creation is not thread-safe.

    :raises ValueError: if a relative dependency path cannot be resolved
    """
    # The following code is executed by threads, it is not safe, so a lock is needed...
    if self.env.CC_NAME not in supported_compilers:
        return self.no_gccdeps_post_run()
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    # The .d file sits next to the object file (re_o rewrites the suffix)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)

    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h \
    #                    /path/to/dep3.h \
    #                    ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h
    #   /path/to/file.o: /path/to/dep3.h
    #   ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])

    # Now join all the lines together
    txt = txt.replace('\\\n', '')

    val = txt.strip()
    lst = val.split(':')
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

    nodes = []
    bld = self.generator.bld

    for x in val:
        node = None
        if os.path.isabs(x):
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            path = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # NOTE(review): the 'lst' guard looks like it was meant to be 'x' -- confirm
            while lst and x[0] == '..':
                x = x[1:]
                path = path.parent
            # acquire before entering the try so a failed acquire does not
            # release an unheld lock (previously acquire was inside the try)
            lock.acquire()
            try:
                node = path.find_resource(x)
            finally:
                lock.release()

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            if not c_preproc.go_absolute:
                if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                    # system header outside of src/bld: not tracked
                    continue
            if id(node) == id(self.inputs[0]):
                # ignore the source file, it is already in the dependencies
                # this way, successful config tests may be retrieved from the cache
                continue
            nodes.append(node)

    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []

    # Drop the cached task signature so it is recomputed with the new deps.
    try:
        del self.cache_sig
    except AttributeError:  # was a bare 'except:'
        pass
    Task.Task.post_run(self)
def post_run(self):
    """Parse the compiler-emitted ``.d`` file and register the discovered
    headers as implicit dependencies, with special handling for the
    'orbis' platform whose dependency output contains quoted, unescaped
    paths that must be re-joined before node resolution.

    :raises ValueError: if a relative dependency path cannot be resolved
    """
    # The following code is executed by threads, it is not safe, so a lock is needed...
    if self.env.CC_NAME not in supported_compilers:
        return self.no_gccdeps_post_run()
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    # Do not check dependencies for disassembly and preprocessed files as this is already the final output
    bld = self.generator.bld
    if bld.is_option_true('show_preprocessed_file') or bld.is_option_true('show_disassembly'):
        return Task.Task.post_run(self)
    # assembly inputs produce no meaningful .d output
    if self.inputs[0].abspath()[-2:] == '.s':
        return Task.Task.post_run(self)

    # The .d file sits next to the object file (re_o rewrites the suffix)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.remove(name)

    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h \
    #                    /path/to/dep3.h \
    #                    ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h
    #   /path/to/file.o: /path/to/dep3.h
    #   ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])

    # Now join all the lines together
    txt = txt.replace('\\\n', '')

    val = txt.strip()
    lst = val.split(':')
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]

    # Orbis has outputs outed not escaped strings, hence rebuilds the correct path objects:
    # fragments of a quoted path (split on spaces above) are glued back together
    # until the fragment carrying the closing quote is found.
    if self.env['PLATFORM'] == 'orbis':
        tmp = []
        nIdx = 0
        while nIdx < len(val):
            if val[nIdx][-1] != '"':
                tmp += [val[nIdx] + ' ']
                nIdx += 1
                while True:
                    tmp[-1] = tmp[-1] + val[nIdx]
                    if val[nIdx][-1] == '"':
                        break  # Found ending
                    nIdx += 1
            else:
                tmp += [val[nIdx]]
            nIdx += 1
        val = tmp

    nodes = []

    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}

    for x in val:
        node = None
        # Remove leading and trailing double quotes
        if x[0] == '"':
            x = x[1:]
        if x[len(x) - 1] == '"':
            x = x[:len(x) - 1]

        if os.path.isabs(x):
            node = path_to_node(bld.root, x, cached_nodes, self.env['PLATFORM'] == 'orbis')
        else:
            path = bld.bldnode
            # when calling find_resource, make sure the path does not begin by '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # NOTE(review): the 'lst' guard looks like it was meant to be 'x' -- confirm
            while lst and x[0] == '..':
                x = x[1:]
                path = path.parent
            node = path_to_node(path, x, cached_nodes)

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            if not c_preproc.go_absolute:
                if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
                    # system header outside of src/bld: not tracked
                    continue
            if id(node) == id(self.inputs[0]):
                # ignore the source file, it is already in the dependencies
                # this way, successful config tests may be retrieved from the cache
                continue
            nodes.append(node)

    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []

    # Drop the cached task signature so it is recomputed with the new deps.
    try:
        del self.cache_sig
    except AttributeError:  # was a bare 'except:'
        pass
    Task.Task.post_run(self)
def distribute_generate_distribution(self):
    """Schedule installation of all distribution artifacts for this
    task generator: libraries/firmware from every ``use``/``use_ltd``
    target, an optional generated wscript (``distribute_lib`` feature),
    an optional debug archive, and the exported include files.
    """
    bld = self.bld
    install_path = Utils.split_path(self.install_path)
    include_dist_paths = set()
    tsk_debug_archive = None

    if 'distribute_lib' in self.features:
        # generate a wscript describing the distributed libraries
        name = 'generated_' + '_'.join(Utils.split_path(self.install_path) + [self.wscript_name])
        wscript_node = self.path.find_or_declare(name)
        tsk = self.wscript_task = self.create_task('distribute_generate_wscript', [], [wscript_node])
        tsk.install_dirs = {}
        tsk.generators = []
        bld.install_as('/'.join(install_path + [self.wscript_name]), wscript_node)
        tsk.install_dirs[wscript_node] = '/'.join(install_path)
        include_dist_paths.add(tuple(install_path[:]))

    install_debug_path = getattr(self, 'install_debug_path', None)
    if install_debug_path:
        # optional archive bundling debug-info companions
        name = 'generated_' + '_'.join(Utils.split_path(self.install_path) + Utils.split_path(self.install_debug_path))
        debug_archive_node = self.path.find_or_declare(name)
        tsk_debug_archive = self.debug_archive_task = self.create_task('distribute_generate_debug', [], [debug_archive_node])
        tsk_debug_archive.install_paths = {}
        bld.install_as(install_debug_path, debug_archive_node)

    use = self.to_list(self.use)
    use_ltd = self.to_list(getattr(self, 'use_ltd', []))
    include_search_paths = []
    for i, x in enumerate(use + use_ltd):
        ltd = i >= len(use)  # entries after 'use' come from 'use_ltd'
        if isinstance(x, (tuple, list)):
            name, subdir_path = tuple(x)
        else:
            name, subdir_path = x, None
        tgen = self.bld.get_tgen_by_name(name)
        if not getattr(tgen, 'posted', None):
            tgen.post()

        # build list of used task generators
        include_tgen = [tgen]
        for y in Utils.to_list(getattr(tgen, 'use', [])):
            include_tgen.append(self.bld.get_tgen_by_name(y))

        # additional nodes to distribute for debug purposes
        dist_debug_nodes = []
        for y in include_tgen:
            # collect include search paths
            include_search_paths.extend(getattr(y, "include_nodes", [])[:])
            # collect exported include search paths
            for z in getattr(y, "export_includes", []):
                include_search_paths.append(y.path.find_node(z))
            # collect debug nodes for distribution
            dist_debug_nodes.extend(getattr(y, 'dist_debug_nodes', []))

        parts = tgen.name.split('/')

        # get distribution nodes
        dist_nodes = getattr(tgen, 'dist_nodes', [])
        if getattr(tgen, 'SDK', False):
            # NOTE(review): dst_subdir_parts stays None on this path; the
            # add()/extend() calls below would fail for SDK targets -- confirm
            dst_subdir_parts = None
        elif set(tgen.features) & set(['fake_lib', 'shlib', 'stlib', 'cshlib', 'cstlib']):
            try:
                link_task = tgen.link_task
            except AttributeError:
                reason = getattr(tgen, 'tgen_disabled', None)
                bld.fatal(u'Library distribution target %r not available or disabled. reason: %r' % (tgen.name, reason))
            if not hasattr(tgen, 'dist_nodes'):
                dist_nodes.append(link_task.outputs[0])
            if ltd:
                dst_subdir_parts = install_path + [self.lib_ltd_folder_name]
            else:
                dst_subdir_parts = install_path + [self.lib_folder_name]
        else:
            if not hasattr(tgen, 'dist_nodes'):
                dist_nodes.append(tgen.path.find_or_declare(tgen.target))
            if ltd:
                dst_subdir_parts = install_path + [self.firmware_ltd_folder_name]
            else:
                dst_subdir_parts = install_path + [self.firmware_folder_name]
        include_dist_paths.add(tuple(dst_subdir_parts[:]))

        # init list of destination file names
        dst_names = list(x.name for x in dist_nodes)

        if 'fake_lib' in tgen.features:
            # an external library is to be distributed with this distribution, extract
            # original path from library and reuse it if possible
            prefix = ('%s/%s/' % (parts[0], parts[1])).lower()
            orig_path = '/'.join(Utils.split_path(dist_nodes[0].path_from(tgen.path)))
            i = orig_path.lower().find(prefix)
            if i >= 0:
                p = Utils.split_path(prefix + orig_path[i + len(prefix):])
            else:
                Logs.warn(u'Unable to extract prefix for %s' % tgen.name)
                p = Utils.split_path(orig_path)
            # Last item in p is the name of the library file itself
            # we just want the prefix path
            dst_subdir_parts.extend(p[0:-1])
        elif len(dist_nodes) > 0:
            if subdir_path is not None:
                if subdir_path.endswith('/') or subdir_path.endswith('\\'):
                    # subdir_path is a directory:
                    dst_subdir_parts.extend(Utils.split_path(subdir_path))
                else:
                    # subdir_path is a file
                    if len(dist_nodes) > 1:
                        bld.fatal(u'Can not distribute multiple targets to single filename for %r' % tgen.name)
                    p = Utils.split_path(subdir_path)
                    dst_subdir_parts.extend(p[0:-1])
                    dst_names[0] = p[-1]
                    base, ext = os.path.splitext(dst_names[0].lower())
                    if ext not in '.nxi .nxf .nxo .rom'.split():
                        Logs.warn(u'Missing file extension for distribution %s. (Check for missing trailing slash sub-path?)' % ('/'.join(p)))
                # also install libsused file if available
                libsused_task = getattr(tgen, 'libsused_task', None)
                if libsused_task is not None:
                    bld.install_as('/'.join(dst_subdir_parts + ['%s_usedlibs.txt' % dst_names[0]]), libsused_task.outputs[0])
            elif 'distribute_lib' in self.features:
                # derive the destination from the toolchain-qualified generator name
                toolchain, toolchain_version, dummy = bld.get_name_prefix(toolchain=tgen.toolchain, platform=tgen.platform).split('/', 2)
                if len(parts) not in (3, 4):
                    bld.fatal(u'Invalid generator name %s' % tgen.name)
                if parts[0] != toolchain:
                    Logs.warn(u'Unexpected toolchain in generator %s' % tgen.name)
                if parts[1] != toolchain_version:
                    Logs.warn(u'Unexpected toolchain version in generator %s' % tgen.name)
                dst_subdir_parts.extend([toolchain, toolchain_version])
                if len(parts) == 4:
                    os_label = parts[2]
                    dst_subdir_parts.append(os_label)
                platform_label = tgen.platform.lower()
                # BUGFIX: ('netx') is a plain string, so the original test was a
                # substring check; a one-element tuple performs exact matching
                if platform_label not in ('netx',):
                    dst_subdir_parts.append(platform_label)
            else:
                bld.fatal(u'Unable to determine install path for %s' % tgen.name)

        dst_subdir = '/'.join(dst_subdir_parts or [])
        for node, n in zip(dist_nodes, dst_names):
            bld.install_as(dst_subdir + '/' + n, node)
        if 'distribute_lib' in self.features:
            tsk.set_inputs(dist_nodes)
            for x in dist_nodes:
                tsk.install_dirs[x] = '/'.join(dst_subdir_parts)
                tsk.generators.append((tgen, x))
        if tsk_debug_archive:
            for node in dist_debug_nodes:
                tsk_debug_archive.set_inputs(node)
                tsk_debug_archive.install_paths[node] = dst_subdir + '/' + node.name

    # finally, install the exported include files next to every distribution path
    for x in self.dist_includes:
        if isinstance(x, (tuple, list)):
            src_path, subdir_path = x
        else:
            src_path, subdir_path = x, None
        for x in include_search_paths:
            include_node = x.find_node(src_path)
            if include_node:
                if subdir_path is None:
                    subdir_path = include_node.parent.path_from(x)
                break
        else:
            # no search path yielded the include; fatal() raises, so
            # include_node cannot be unbound below
            bld.fatal(u'Include file %s not found in use/use_ltd targets' % src_path)
        for y in include_dist_paths:
            dst_path_parts = list(y) + [self.include_folder_name] + Utils.split_path(subdir_path)
            bld.install_files('/'.join(dst_path_parts), [include_node])
def post_run(self):
    """Parse the ``.d`` file written by the compiler and register the
    discovered header dependencies for this task.

    Only runs for task classes listed in ``env.ENABLE_GCCDEPS``; other
    classes fall through to the regular post_run. Raises if the ``.d``
    file is missing (typically because the flags that request dependency
    output were overridden).
    """
    if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
        return super(self.derived_gccdeps, self).post_run()
    # the .d file sits next to the object file (re_o rewrites the suffix)
    name = self.outputs[0].abspath()
    name = re_o.sub('.d', name)
    try:
        txt = Utils.readf(name)
    except EnvironmentError:
        Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
        raise
    #os.remove(name)
    # Compilers have the choice to either output the file's dependencies
    # as one large Makefile rule:
    #
    #   /path/to/file.o: /path/to/dep1.h \
    #                    /path/to/dep2.h \
    #                    /path/to/dep3.h \
    #                    ...
    #
    # or as many individual rules:
    #
    #   /path/to/file.o: /path/to/dep1.h
    #   /path/to/file.o: /path/to/dep2.h
    #   /path/to/file.o: /path/to/dep3.h
    #   ...
    #
    # So the first step is to sanitize the input by stripping out the left-
    # hand side of all these lines. After that, whatever remains are the
    # implicit dependencies of task.outputs[0]
    txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
    # Now join all the lines together
    txt = txt.replace('\\\n', '')
    val = txt.strip()
    # split on unescaped whitespace; un-escape embedded spaces afterwards
    val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
    nodes = []
    bld = self.generator.bld
    # Dynamically bind to the cache
    try:
        cached_nodes = bld.cached_nodes
    except AttributeError:
        cached_nodes = bld.cached_nodes = {}
    for x in val:
        node = None
        if os.path.isabs(x):
            node = path_to_node(bld.root, x, cached_nodes)
        else:
            # TODO waf 1.9 - single cwd value
            path = getattr(bld, 'cwdx', bld.bldnode)
            # when calling find_resource, make sure the path does not contain '..'
            x = [k for k in Utils.split_path(x) if k and k != '.']
            # normalize '..' components: a leading '..' moves the base node up,
            # an inner '..' cancels out the preceding component
            while '..' in x:
                idx = x.index('..')
                if idx == 0:
                    x = x[1:]
                    path = path.parent
                else:
                    del x[idx]
                    del x[idx-1]
            node = path_to_node(path, x, cached_nodes)
        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue
        nodes.append(node)
    Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
    bld.node_deps[self.uid()] = nodes
    bld.raw_deps[self.uid()] = []
    # drop the cached task signature so it is recomputed with the new deps
    try:
        del self.cache_sig
    except AttributeError:
        pass
    Task.Task.post_run(self)