def run(self):
    """Print the modules pool to stdout.

    When ``self.options.withfiles`` is set, each module is listed with its
    fetch status, origin URL / parent module and the files it provides;
    otherwise a simple "path<TAB>source" table is printed.
    """
    if self.options.withfiles:
        for m in self.modules_pool:
            if not m.isfetched:
                # Unfetched modules have no local path; show the URL only.
                print("#!UNFETCHED")
                print(m.url + '\n')
                continue
            print(path.relpath(m.path))
            if m.source in (fetch.SVN, fetch.GIT):
                print("# " + m.url)
            elif m.source == fetch.GITSUBMODULE:
                print("# submodule of: %s" % m.parent.url)
            if m.source in (fetch.SVN, fetch.GIT, fetch.LOCAL) and m.parent:
                print("# parent: %s" % m.parent.url)
            else:
                print("# root module")
            if not m.files:  # idiomatic emptiness test instead of len()
                print(" # no files")
            else:
                for f in m.files:
                    print(" " + path.relpath(f.path, m.path))
            print("")
    else:
        print("#path\tsource")
        for m in self.modules_pool:
            print("%s\t%s" % (path.relpath(m.path),
                              _convert_to_source_name(m.source)))
def run(self):
    """Print the modules pool to stdout.

    When ``self.options.withfiles`` is set, each module is listed with its
    fetch status, origin URL / parent module and the files it provides;
    otherwise a simple "path<TAB>source" table is printed (with the raw
    ``source`` value).
    """
    if self.options.withfiles:
        for m in self.modules_pool:
            if not m.isfetched:
                # Unfetched modules have no local path; show the URL only.
                print("#!UNFETCHED")
                print(m.url + '\n')
                continue
            print(path.relpath(m.path))
            if m.source in (fetch.SVN, fetch.GIT):
                print("# " + m.url)
            elif m.source == fetch.GITSUBMODULE:
                print("# submodule of: %s" % m.parent.url)
            if m.source in (fetch.SVN, fetch.GIT, fetch.LOCAL) and m.parent:
                print("# parent: %s" % m.parent.url)
            else:
                print("# root module")
            if not m.files:  # idiomatic emptiness test instead of len()
                print(" # no files")
            else:
                for f in m.files:
                    print(" " + path.relpath(f.path, m.path))
            print("")
    else:
        print("#path\tsource")
        for m in self.modules_pool:
            print("%s\t%s" % (path.relpath(m.path), m.source))
def __init__(self, hname):
    """Initialize a header file, classifying it as project or external."""
    super().__init__(hname)
    if util.path.in_smdir(self.inname):
        # Header lives inside the project tree: store a relative outname.
        kind, out = HeaderFile.projectheader, relpath(self.inname)
    else:
        # External header: keep the original (absolute) name untouched.
        kind, out = HeaderFile.externalheader, self.inname
    self.headertype = kind
    self.outname = out
def __init__(self, path, module, library=None, vlog_opt=None, include_dirs=None):
    """Initialize a Verilog source file.

    Besides any caller-supplied include directories, the file's own
    directory (relative form) is always appended to ``include_dirs``.
    """
    SourceFile.__init__(self, path=path, module=module, library=library)
    # Normalize a falsy vlog_opt (None, "") to the empty string.
    self.vlog_opt = vlog_opt if vlog_opt else ""
    # Copy caller-provided dirs (never alias the argument), then add our own.
    self.include_dirs = list(include_dirs) if include_dirs else []
    self.include_dirs.append(path_mod.relpath(self.dirname))
def start(self):
    """Start and initialize the snapshot process.

    Resolves the logical volume backing ``self.target_directory``, records
    the directory's path relative to its mountpoint, runs the 'init'
    callback and kicks off snapshot creation.

    :raises LVMError: if no logical volume is mounted at the directory's
        mountpoint.
    """
    # Fix: use the module-level LOGGER consistently — the original first
    # call went through the root logger via logging.info().
    LOGGER.info("Finding volume for directory: %s", self.target_directory)
    mountpoint = getmount(self.target_directory)
    LOGGER.info("Mount point for directory: %s", mountpoint)
    self.relpath = relpath(self.target_directory, mountpoint)
    LOGGER.info("Path relative to snapshot mountpoint: %s", self.relpath)
    self.logical_volume = LogicalVolume.find_mounted(mountpoint)
    if not self.logical_volume:
        raise LVMError("No logical volume found for mountpoint=%r target_path=%r" %
                       (mountpoint, self.target_directory))
    LOGGER.info("%r is on logical volume %s/%s",
                self.target_directory,
                self.logical_volume.vg_name,
                self.logical_volume.lv_name)
    self.run_callback('init')
    return self.create_snapshot()
def __init__(self, tname):
    """Initialize a build target from its name."""
    super().__init__(tname)
    self.name = tname
    self.outname = relpath(tname)
    self.encname = generate_oname(self.outname)
    # A target has no input file; its inname must never be consulted.
    self.inname = None
def __init__(self, wname):
    """Initialize the element; outname is wname made relative, inname unused."""
    super().__init__(wname)
    self.outname = relpath(wname)
    # This element has no input file.
    self.inname = None
def rel_path(self, dir=None):
    """Return ``self.path`` relative to *dir* (current directory when omitted)."""
    base = os.getcwd() if dir is None else dir
    return path_mod.relpath(self.path, base)
def fill(self, confinfo, variables):
    '''
    Fill this BuildOrder with contents for later processing.

    Configuration inheritance and the configuration variable dict are used
    to create all the buildelements with their configuration specified by
    the user in the variables array.
    Attention: black magic is involved here.
    '''
    #---------------------
    #0. step: create source-for-target configurations
    # create new Config object, with parents=[target,source]
    # and save it as confinfo[targetname + "-" + sourcename] = Config(...)
    # later, use .get(target + "-" + source) to access properties,
    # the get method will do the hyperresolution
    targetlist = variables["build"].eval(conf.configs["project"])

    message("================================== target list:")
    for t in targetlist:
        message(t)
    message("================================== end of target list")

    debug("creating source-for-target configurations...")
    for target in targetlist:
        for source in variables["use"].eval(conf.configs[target]):
            debug(repr(target) + " uses " + repr(source))
            targetconf = conf.configs[target]
            sourceconf = conf.configs[source]
            # NOTE(review): the local is never read again — presumably the
            # Config constructor registers itself in conf.configs; confirm.
            newconf = conf.config.Config(
                name=targetconf.name + "-" + sourceconf.name,
                parents=[targetconf, sourceconf],
                directory=sourceconf.directory,
                conftype=conf.config.Config.TYPE_SRCFORTARGET
            )

    debug("===== src-for-target configs:")
    debug(conf.configs["project"].treeview())
    debug("===== end of configs")

    #---------------------
    #1. step: iterate through all dependencies and fill them
    #create BuildElements and fill them with information
    #supplied by the variables configuration
    for target in targetlist:
        order_target = BuildTarget(target)
        targetc = conf.configs[target]

        for element in variables["use"].eval(conf.configs[target]):
            #target-source name, for later lookups
            st = target + "-" + element
            #target-source configuration
            stc = conf.configs[st]

            #this object will now be filled with information
            order_file = SourceFile(element)

            #preparation of compiler invokation
            #compiler:
            crun = variables["c"].eval(stc) + " "
            #compiler flags:
            crun += " ".join(variables["cflags"].eval(stc))

            # encode the compiler flags etc
            objdir = abspath(variables["objdir"].eval(stc))

            #the encoded name:
            #TODO: maybe also encode the '/' in rsource
            #debug("inname = " + relpath(order_file.inname))
            encname = relpath(order_file.inname) + "-" + generate_oname(crun)

            #assemble compiler output file without extension
            encpathname = objdir + "/" + encname
            oname = encpathname + ".o"

            # add wanted (by config) dependency files
            file_depends = variables["depends"].eval(stc)
            for d in file_depends:
                d_obj = WantedDependency(d)
                order_file.depends_wanted.add(d_obj)

            #add sourcefile path itself to depends
            ad = variables["autodepends"].eval(stc)
            if md_enabled(ad):  # if gcc MD enabled for this file
                mdfile = encpathname + ".d"
                order_file.mdfile = mdfile

                if os.path.isfile(mdfile):
                    #if .d file exists:
                    # add its contents as wanted dependencies
                    for dep in parse_dfile(mdfile):
                        dep_header = HeaderFile(dep)
                        #TODO: ignore system headers variable
                        # NOTE(review): the `False and ...` guard makes this
                        # branch dead code on purpose (placeholder for a
                        # future "ignore system headers" option).
                        if False and dep_header.headertype == HeaderFile.systemheader:
                            pass
                        else:
                            final_header = self.find_merge_element(dep_header)
                            order_file.add_dependency(final_header)
                else:
                    #if MD is enabled but not yet present:
                    # we NEED to rebuild this source
                    order_file.needs_build = True
                    debug(mdfile + " will be generated")

                #see man 1 gcc (search for -MD)
                crun += " -MD"  # (re)generate c headers dependency file

            crun += " -o " + oname
            crun += " -c " + order_file.inname

            order_file.loglevel = variables["loglevel"].eval(stc)
            order_file.crun = crun
            order_file.encname = encname
            order_file.outname = oname
            order_file.objdir = objdir

            #prebuild command string
            s_prb = " ".join(variables["prebuild"].eval(stc).tolist())
            if len(s_prb) > 0:
                order_file.prebuild = s_prb

            #postbuild command string
            s_pob = " ".join(variables["postbuild"].eval(stc).tolist())
            if len(s_pob) > 0:
                order_file.postbuild = s_pob

            #add the newly created file as a dependency of the target
            order_target.depends_wanted.add(order_file)
            self.filedict_append(order_file)

        # <- for each target loop
        order_target.loglevel = variables["loglevel"].eval(conf.configs[target])

        #compiler for TARGET
        ctrun = variables["c"].eval(targetc) + " "
        #compiler flags
        ctrun += " ".join(variables["cflags"].eval(targetc).tolist()) + " "
        #linker flags
        ctrun += " ".join(variables["ldflags"].eval(targetc).tolist())
        #target output name
        ctrun += " -o " + abspath(target)

        t_prb = " ".join(variables["prebuild"].eval(targetc).tolist())
        if len(t_prb) > 0:
            order_target.prebuild = t_prb
        t_pob = " ".join(variables["postbuild"].eval(targetc).tolist())
        if len(t_pob) > 0:
            order_target.postbuild = t_pob

        #create wanted dependencies (by config) for this target.
        target_depends = variables["depends"].eval(targetc).tolist()
        #debug(t + "depends on: " + pprint.pformat(target_depends))
        for d in target_depends:
            d_obj = WantedDependency(d)
            order_target.depends_wanted.add(d_obj)

        #append all object files for linkage
        #TODO: rewrite and relocate to somewhere else!
        for ofile in order_target.depends_wanted:
            #add all outnames of all dependency sourcefiles
            #to the compiler cmd line
            if type(ofile) == SourceFile:
                ctrun += " " + abspath(ofile.outname)
            else:
                continue

        order_target.crun = ctrun

        #if another target depends on this one, we need the dict entry:
        self.filedict_append(order_target)
        #include current target to the build order:
        self.targets.add(order_target)

    #----------------------
    # 2. step: reuse wanted dependencies to add buildelements
    # to the correct hierarchy etc
    debug("current filedict:\n" + pprint.pformat(self.filedict))
    debug("inserting and reusing dependencies:")

    for target in self.targets:
        for wanted_dependency in target.depends_wanted:
            debug("-> " + repr(wanted_dependency) + " " + str(type(wanted_dependency)) + " wanted for " + repr(target))
            final_dep = self.find_merge_element(wanted_dependency)
            debug("-> using " + str(id(final_dep)) + "(" + str(type(final_dep)) + ")")
            target.add_dependency(final_dep)