def run(self):
    """Worker entry point (hash-cache variant).

    Loads the serialized build configuration from the data pipe, mirrors
    the main process's global state into this worker, then drains
    ``self.module_queue``, running ModuleAutoGen for each (module, arch)
    work item.  PreMake/Make cache hit-or-miss results are reported via
    ``self.cache_q``; failing task names via ``self.feedback_q``.  The
    "Done" and "CacheDone" sentinels are always emitted on exit.
    """
    try:
        taskname = "Init"
        # Deserialize the build configuration produced by the main process.
        with self.file_lock:
            try:
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            except Exception:
                # Narrowed from a bare except so SystemExit/KeyboardInterrupt
                # are not swallowed while reporting the load failure.
                self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
        EdkLogger.LogClientInitialize(self.log_q)
        loglevel = self.data_pipe.Get("LogLevel")
        if not loglevel:
            loglevel = EdkLogger.INFO
        EdkLogger.SetLevel(loglevel)
        target = self.data_pipe.Get("P_Info").get("Target")
        toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
        archlist = self.data_pipe.Get("P_Info").get("ArchList")
        active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
        workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(workspacedir, PackagesPath)
        self.Wa = WorkSpaceInfo(workspacedir, active_p, target, toolchain, archlist)
        self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
        # Mirror the parent process's global state into this worker process.
        GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
        GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
        # NOTE(review): pokes os.environ's private backing dict directly —
        # relies on a CPython implementation detail.
        os.environ._data = self.data_pipe.Get("Env_Var")
        GlobalData.gWorkspace = workspacedir
        GlobalData.gDisableIncludePathCheck = False
        GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
        GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
        GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
        GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
        GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
        GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
        # Per-worker caches start empty.
        GlobalData.gModulePreMakeCacheStatus = dict()
        GlobalData.gModuleMakeCacheStatus = dict()
        GlobalData.gHashChainStatus = dict()
        GlobalData.gCMakeHashFile = dict()
        GlobalData.gModuleHashFile = dict()
        GlobalData.gFileHashDict = dict()
        GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
        GlobalData.file_lock = self.file_lock
        CommandTarget = self.data_pipe.Get("CommandTarget")
        # Rebuild "<TokenSpace>.<Pcd>[.<Field>]=<Value>" strings from the
        # serialized build-option PCD tuples.
        pcd_from_build_option = []
        for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
            pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
            if pcd_tuple[2].strip():
                pcd_id = ".".join((pcd_id, pcd_tuple[2]))
            pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
        GlobalData.BuildOptionPcd = pcd_from_build_option
        module_count = 0
        FfsCmd = self.data_pipe.Get("FfsCommand")
        if FfsCmd is None:
            FfsCmd = {}
        GlobalData.FfsCmd = FfsCmd
        PlatformMetaFile = self.GetPlatformMetaFile(
            self.data_pipe.Get("P_Info").get("ActivePlatform"),
            self.data_pipe.Get("P_Info").get("WorkspaceDir"))
        while True:
            if self.module_queue.empty():
                break
            if self.error_event.is_set():
                break
            module_count += 1
            module_file, module_root, module_path, module_basename, \
                module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
            modulefullpath = os.path.join(module_root, module_file)
            taskname = " : ".join((modulefullpath, module_arch))
            module_metafile = PathClass(module_file, module_root)
            if module_path:
                module_metafile.Path = module_path
            if module_basename:
                module_metafile.BaseName = module_basename
            if module_originalpath:
                module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
            arch = module_arch
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain,
                               arch, PlatformMetaFile, self.data_pipe)
            Ma.IsLibrary = IsLib
            # SourceFileList calling sequence impact the makefile string sequence.
            # Create cached SourceFileList here to unify its calling sequence for both
            # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
            RetVal = Ma.SourceFileList
            if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                try:
                    CacheResult = Ma.CanSkipbyPreMakeCache()
                except Exception:
                    # Any cache-probe failure counts as a miss; report the task.
                    CacheResult = False
                    self.feedback_q.put(taskname)
                if CacheResult:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                    continue
                else:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))
            Ma.CreateCodeFile(False)
            Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))
            if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                try:
                    CacheResult = Ma.CanSkipbyMakeCache()
                except Exception:
                    CacheResult = False
                    self.feedback_q.put(taskname)
                if CacheResult:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                    continue
                else:
                    self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))
    except Empty:
        pass
    except Exception:
        # Narrowed from a bare except; report the failing task name so the
        # main process (reading feedback_q) can surface the error.
        self.feedback_q.put(taskname)
    finally:
        self.feedback_q.put("Done")
        self.cache_q.put("CacheDone")
class AutoGenWorkerInProcess(mp.Process):
    """Build worker process.

    Pulls (module, arch) work items from a shared queue, replays the main
    process's build configuration from a serialized data pipe, and runs
    ModuleAutoGen for each module.  Hash-cache hit/miss results go to
    ``cache_q``; progress and failing task names go to ``feedback_q``.
    """

    def __init__(self, module_queue, data_pipe_file_path, feedback_q,
                 file_lock, cache_q, log_q, error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue            # work items from the scheduler
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None                       # loaded lazily in run()
        self.feedback_q = feedback_q                # progress / error reporting
        self.PlatformMetaFileSet = {}               # (filepath, root) -> platform meta file
        self.file_lock = file_lock
        self.cache_q = cache_q                      # hash-cache results
        self.log_q = log_q
        self.error_event = error_event              # set by the main process on failure

    def GetPlatformMetaFile(self, filepath, root):
        """Return the cached platform meta file for (filepath, root),
        inserting *filepath* itself on first use."""
        # setdefault replaces the original try/bare-except insertion;
        # behavior is identical but real errors are no longer hidden.
        return self.PlatformMetaFileSet.setdefault((filepath, root), filepath)

    def run(self):
        """Worker main loop: restore build state, then process modules from
        module_queue until error_event is set.

        A work item whose module_file is None is the scheduler's
        end-of-queue sentinel; the worker reports "QueueEmpty" and keeps
        polling until error_event (or process shutdown).
        """
        try:
            taskname = "Init"
            # Deserialize the build configuration produced by the main process.
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except Exception:
                    # Narrowed from a bare except so SystemExit/KeyboardInterrupt
                    # are not swallowed while reporting the load failure.
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")
            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(workspacedir, active_p, target, toolchain, archlist)
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent process's global state into this worker process.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # NOTE(review): pokes os.environ's private backing dict directly —
            # relies on a CPython implementation detail.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            # Per-worker caches start empty.
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild "<TokenSpace>.<Pcd>[.<Field>]=<Value>" strings from the
            # serialized build-option PCD tuples.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(
                self.data_pipe.Get("P_Info").get("ActivePlatform"),
                self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            while True:
                if self.error_event.is_set():
                    break
                module_count += 1
                try:
                    module_file, module_root, module_path, module_basename, \
                        module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                except Empty:
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # End-of-queue sentinel from the scheduler.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue
                modulefullpath = os.path.join(module_root, module_file)
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain,
                                   arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except Exception:
                        # Any cache-probe failure counts as a miss; report the task.
                        CacheResult = False
                        self.feedback_q.put(taskname)
                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))
                Ma.CreateAsBuiltInf()
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except Exception:
                        CacheResult = False
                        self.feedback_q.put(taskname)
                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))
        except Exception as e:
            # Narrowed from a bare except; log and report the failing task.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")

    def printStatus(self):
        """Print per-process cache statistics (debug aid)."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            # Group cached build objects by file suffix; setdefault replaces
            # the original try/except-KeyError insertion pattern.
            name = str(buildobj)
            if name.lower().endswith("dec"):
                groupobj.setdefault('dec', []).append(name)
            if name.lower().endswith("dsc"):
                groupobj.setdefault('dsc', []).append(name)
            if name.lower().endswith("inf"):
                groupobj.setdefault('inf', []).append(name)
        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))
def run(self):
    """Worker entry point (older gCacheIR variant).

    Loads the serialized build configuration, mirrors the main process's
    global state (including the shared cache IR in ``self.share_data``)
    into this worker, then drains ``self.module_queue``, running
    ModuleAutoGen and the hash-based skip checks for each module.  A
    "Done" sentinel is always emitted on exit.
    """
    try:
        taskname = "Init"
        with self.file_lock:
            # Best-effort existence check: a missing pipe file is reported,
            # but the load below is still attempted (matching original flow).
            if not os.path.exists(self.data_pipe_file_path):
                self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            self.data_pipe = MemoryDataPipe()
            self.data_pipe.load(self.data_pipe_file_path)
        EdkLogger.LogClientInitialize(self.log_q)
        loglevel = self.data_pipe.Get("LogLevel")
        if not loglevel:
            loglevel = EdkLogger.INFO
        EdkLogger.SetLevel(loglevel)
        target = self.data_pipe.Get("P_Info").get("Target")
        toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
        archlist = self.data_pipe.Get("P_Info").get("ArchList")
        active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
        workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(workspacedir, PackagesPath)
        self.Wa = WorkSpaceInfo(workspacedir, active_p, target, toolchain, archlist)
        self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
        # Mirror the parent process's global state into this worker process.
        GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
        GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
        # NOTE(review): pokes os.environ's private backing dict directly —
        # relies on a CPython implementation detail.
        os.environ._data = self.data_pipe.Get("Env_Var")
        GlobalData.gWorkspace = workspacedir
        GlobalData.gDisableIncludePathCheck = False
        GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
        GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
        GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
        GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
        # Shared cache intermediate representation (manager dict from parent).
        GlobalData.gCacheIR = self.share_data
        GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
        GlobalData.file_lock = self.file_lock
        GlobalData.cache_lock = self.cache_lock
        CommandTarget = self.data_pipe.Get("CommandTarget")
        # Rebuild "<TokenSpace>.<Pcd>[.<Field>]=<Value>" strings from the
        # serialized build-option PCD tuples.
        pcd_from_build_option = []
        for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
            pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
            if pcd_tuple[2].strip():
                pcd_id = ".".join((pcd_id, pcd_tuple[2]))
            pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
        GlobalData.BuildOptionPcd = pcd_from_build_option
        module_count = 0
        FfsCmd = self.data_pipe.Get("FfsCommand")
        if FfsCmd is None:
            FfsCmd = {}
        GlobalData.FfsCmd = FfsCmd
        PlatformMetaFile = self.GetPlatformMetaFile(
            self.data_pipe.Get("P_Info").get("ActivePlatform"),
            self.data_pipe.Get("P_Info").get("WorkspaceDir"))
        libConstPcd = self.data_pipe.Get("LibConstPcd")
        Refes = self.data_pipe.Get("REFS")
        GlobalData.libConstPcd = libConstPcd
        GlobalData.Refes = Refes
        while True:
            if self.module_queue.empty():
                break
            if self.error_event.is_set():
                break
            module_count += 1
            module_file, module_root, module_path, module_basename, \
                module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
            modulefullpath = os.path.join(module_root, module_file)
            taskname = " : ".join((modulefullpath, module_arch))
            module_metafile = PathClass(module_file, module_root)
            if module_path:
                module_metafile.Path = module_path
            if module_basename:
                module_metafile.BaseName = module_basename
            if module_originalpath:
                module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
            arch = module_arch
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain,
                               arch, PlatformMetaFile, self.data_pipe)
            Ma.IsLibrary = IsLib
            if IsLib:
                # Restore per-library constant PCDs and referencing modules
                # recorded by the main process.
                key = (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)
                if key in libConstPcd:
                    Ma.ConstPcd = libConstPcd[key]
                if key in Refes:
                    Ma.ReferenceModules = Refes[key]
            if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                Ma.GenModuleFilesHash(GlobalData.gCacheIR)
                Ma.GenPreMakefileHash(GlobalData.gCacheIR)
                if Ma.CanSkipbyPreMakefileCache(GlobalData.gCacheIR):
                    continue
            Ma.CreateCodeFile(False)
            Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.File, Ma.Arch), []))
            if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                Ma.GenMakeHeaderFilesHash(GlobalData.gCacheIR)
                Ma.GenMakeHash(GlobalData.gCacheIR)
                if Ma.CanSkipbyMakeCache(GlobalData.gCacheIR):
                    continue
                else:
                    Ma.PrintFirstMakeCacheMissFile(GlobalData.gCacheIR)
    except Empty:
        pass
    except Exception:
        # Narrowed from a bare except; dump the traceback and report the
        # failing task so the main process can surface the error.
        traceback.print_exc(file=sys.stdout)
        self.feedback_q.put(taskname)
    finally:
        self.feedback_q.put("Done")
class AutoGenWorkerInProcess(mp.Process):
    """Build worker process (older variant with a shared ``share_data`` dict
    instead of a cache queue).

    Pulls (module, arch) work items from a shared queue, replays the main
    process's build configuration from a serialized data pipe, and runs
    ModuleAutoGen for each module, reporting progress via ``feedback_q``.
    """

    def __init__(self, module_queue, data_pipe_file_path, feedback_q,
                 file_lock, share_data, log_q, error_event):
        mp.Process.__init__(self)
        self.module_queue = module_queue            # work items from the scheduler
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None                       # loaded lazily in run()
        self.feedback_q = feedback_q                # progress / error reporting
        self.PlatformMetaFileSet = {}               # (filepath, root) -> platform meta file
        self.file_lock = file_lock
        self.share_data = share_data                # shared dict from the parent process
        self.log_q = log_q
        self.error_event = error_event              # set by the main process on failure

    def GetPlatformMetaFile(self, filepath, root):
        """Return the cached platform meta file for (filepath, root),
        inserting *filepath* itself on first use."""
        # setdefault replaces the original try/bare-except insertion;
        # behavior is identical but real errors are no longer hidden.
        return self.PlatformMetaFileSet.setdefault((filepath, root), filepath)

    def run(self):
        """Worker main loop: restore build state from the data pipe, then
        run AutoGen for modules from module_queue until the queue drains or
        error_event is set.  A "Done" sentinel is always emitted on exit."""
        try:
            taskname = "Init"
            with self.file_lock:
                # Best-effort existence check: a missing pipe file is reported,
                # but the load below is still attempted (matching original flow).
                if not os.path.exists(self.data_pipe_file_path):
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
                self.data_pipe = MemoryDataPipe()
                self.data_pipe.load(self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")
            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(workspacedir, active_p, target, toolchain, archlist)
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent process's global state into this worker process.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            # NOTE(review): pokes os.environ's private backing dict directly —
            # relies on a CPython implementation detail.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            # Rebuild "<TokenSpace>.<Pcd>[.<Field>]=<Value>" strings from the
            # serialized build-option PCD tuples.
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            PlatformMetaFile = self.GetPlatformMetaFile(
                self.data_pipe.Get("P_Info").get("ActivePlatform"),
                self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            libConstPcd = self.data_pipe.Get("LibConstPcd")
            Refes = self.data_pipe.Get("REFS")
            while True:
                if self.module_queue.empty():
                    break
                if self.error_event.is_set():
                    break
                module_count += 1
                module_file, module_root, module_path, module_basename, \
                    module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                modulefullpath = os.path.join(module_root, module_file)
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain,
                                   arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                if IsLib:
                    # Restore per-library constant PCDs and referencing modules
                    # recorded by the main process.
                    key = (Ma.MetaFile.File, Ma.MetaFile.Root, Ma.Arch, Ma.MetaFile.Path)
                    if key in libConstPcd:
                        Ma.ConstPcd = libConstPcd[key]
                    if key in Refes:
                        Ma.ReferenceModules = Refes[key]
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.File, Ma.Arch), []))
        except Empty:
            pass
        except Exception:
            # Narrowed from a bare except; dump the traceback and report the
            # failing task so the main process can surface the error.
            traceback.print_exc(file=sys.stdout)
            self.feedback_q.put(taskname)
        finally:
            self.feedback_q.put("Done")

    def printStatus(self):
        """Print per-process cache statistics (debug aid)."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            # Group cached build objects by file suffix; setdefault replaces
            # the original try/except-KeyError insertion pattern.
            name = str(buildobj)
            if name.lower().endswith("dec"):
                groupobj.setdefault('dec', []).append(name)
            if name.lower().endswith("dsc"):
                groupobj.setdefault('dsc', []).append(name)
            if name.lower().endswith("inf"):
                groupobj.setdefault('inf', []).append(name)
        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))