def run(self):
    sName=self.parser.getArgs()[0]
    if sName[-1]==path.sep:
        sName=sName[:-1]

    if self.parser.getOptions().tarname!=None:
        dName=self.parser.getOptions().tarname
    else:
        dName=sName+".tgz"

    if self.parser.getOptions().pyfoam:
        self.parser.getOptions().additional.append("PyFoam*")

    sol=SolutionDirectory(sName,archive=None,paraviewLink=False)
    if not sol.isValid():
        self.error(sName,"does not look like real OpenFOAM-case because",
                   sol.missingFiles(),"are missing or of the wrong type")

    if self.parser.getOptions().chemkin:
        sol.addToClone("chemkin")

    if self.opts.noPloyMesh:
        self.parser.getOptions().exclude.append("polyMesh")

    sol.packCase(dName,
                 last=self.parser.getOptions().last,
                 additional=self.parser.getOptions().additional,
                 exclude=self.parser.getOptions().exclude,
                 base=self.parser.getOptions().basename)
def run(self):
    sName = self.parser.getArgs()[0]
    if sName[-1] == path.sep:
        sName = sName[:-1]

    if self.parser.getOptions().tarname != None:
        dName = self.parser.getOptions().tarname
    else:
        if sName == path.curdir:
            dName = path.basename(path.abspath(sName))
        else:
            dName = sName
        dName += ".tgz"

    if self.parser.getOptions().pyfoam:
        self.parser.getOptions().additional.append("PyFoam*")

    sol = SolutionDirectory(sName,
                            archive=None,
                            addLocalConfig=True,
                            paraviewLink=False)
    if not sol.isValid():
        self.error(sName, "does not look like real OpenFOAM-case because",
                   sol.missingFiles(), "are missing or of the wrong type")

    if self.parser.getOptions().chemkin:
        sol.addToClone("chemkin")

    if self.opts.noPloyMesh:
        self.parser.getOptions().exclude.append("polyMesh")

    sol.packCase(dName,
                 last=self.parser.getOptions().last,
                 additional=self.parser.getOptions().additional,
                 exclude=self.parser.getOptions().exclude,
                 base=self.parser.getOptions().basename)
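
# Usage sketch (not part of the utility above): driving the same packing step
# directly from a script. The case name "damBreak" and the option values are
# invented for illustration; the SolutionDirectory/packCase calls and their
# keyword arguments mirror the ones used in run() above, and the import path is
# the usual location of SolutionDirectory in PyFoam.
from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory

def packExampleCase():
    sol = SolutionDirectory("damBreak", archive=None, paraviewLink=False)
    if not sol.isValid():
        raise RuntimeError("damBreak does not look like an OpenFOAM case")
    sol.packCase("damBreak.tgz",
                 last=False,              # pack all time directories, not only the last one
                 additional=["PyFoam*"],  # extra glob patterns to include
                 exclude=["*.gz"],        # glob patterns to leave out
                 base=None)               # base name inside the archive (None: keep the case name; assumption)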
def recursiveCompress(self, dirName):
    if self.verbose > 1:
        print_("Recursively checking", dirName)
    if path.isdir(dirName):
        s = SolutionDirectory(dirName,
                              archive=None,
                              paraviewLink=False,
                              parallel=True)
        if s.isValid():
            try:
                self.compressCase(dirName)
            except OSError:
                e = sys.exc_info()[1]  # Needed because Python 2.5 does not support 'as e'
                self.warning("Problem processing", dirName, ":", e)
            return
    for f in listdir(dirName):
        name = path.join(dirName, f)
        try:
            if path.isdir(name):
                self.recursiveCompress(name)
        except OSError:
            e = sys.exc_info()[1]  # Needed because Python 2.5 does not support 'as e'
            self.warning("Problem processing", name, ":", e)
def compressCase(self, dirName, warn=False):
    if not path.exists(dirName):
        self.error("Directory", dirName, "does not exist")
    s = SolutionDirectory(dirName,
                          archive=None,
                          paraviewLink=False,
                          parallel=True,
                          tolerant=True)
    if not s.isValid():
        if warn:
            print_("Directory", dirName, "is not an OpenFOAM-case")
        return
    self.nrDir += 1
    oldNr = self.nrFiles
    oldUnc = self.prevSize
    oldCon = self.nowSize
    if self.verbose > 0:
        print_("Processing case", dirName)
    # compress meshes
    for d in glob(path.join(dirName, "*", "polyMesh")) + glob(
            path.join(dirName, "*", "*", "polyMesh")):
        if path.isdir(d):
            self.compressDirectory(d)
    # compress times
    for t in s:
        self.compressDirectory(t.name)
    # compress logfiles if requested
    if self.opts.logfile:
        for f in glob(path.join(dirName, "*.logfile")):
            self.compressFile(path.join(dirName, f))
    # processor directories
    for p in s.procDirs:
        self.compressDirectory(path.join(dirName, p))
    if self.nrFiles > oldNr and self.verbose > 0:
        print_(" -> ", self.nrFiles - oldNr, "files compressed.",
               humanReadableSize((self.prevSize - oldUnc) - (self.nowSize - oldCon)),
               "gained")
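
# The two compression methods above delegate the real work to
# self.compressDirectory() and self.compressFile(), which are not part of this
# excerpt. The standalone sketch below shows one plausible shape of that step
# (gzip every regular, not yet compressed file in a directory); the function
# name and behaviour are assumptions, not the actual PyFoam implementation.
import gzip
import os
import shutil

def gzipFilesIn(dirName):
    """Compress every plain file in dirName with gzip and drop the original."""
    for f in os.listdir(dirName):
        src = os.path.join(dirName, f)
        if not os.path.isfile(src) or src.endswith(".gz"):
            continue  # skip sub-directories and already compressed files
        with open(src, "rb") as fIn, gzip.open(src + ".gz", "wb") as fOut:
            shutil.copyfileobj(fIn, fOut)
        os.remove(src)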
def lookForCases(d):
    for n in tqdm(listdir(d),
                  unit="entries",
                  leave=False,
                  desc=path.basename(path.abspath(d)),
                  disable=not self.opts.progressBar):
        if not self.fnmatch(n):
            continue
        cName=path.join(d,n)
        if path.isdir(cName):
            try:
                sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                if sol.isValid():
                    if self.opts.progress:
                        print_("Processing",cName)

                    data={}
                    data["mtime"]=stat(cName)[ST_MTIME]
                    times=sol.getTimes()
                    try:
                        data["first"]=times[0]
                    except IndexError:
                        data["first"]="None"
                    try:
                        data["last"]=times[-1]
                    except IndexError:
                        data["last"]="None"
                    data["nrSteps"]=len(times)
                    data["procs"]=sol.nrProcs()
                    data["pFirst"]=-1
                    data["pLast"]=-1
                    data["nrParallel"]=-1
                    if self.opts.parallel:
                        pTimes=sol.getParallelTimes()
                        data["nrParallel"]=len(pTimes)
                        if len(pTimes)>0:
                            data["pFirst"]=pTimes[0]
                            data["pLast"]=pTimes[-1]
                    data["name"]=cName
                    data["diskusage"]=-1
                    if self.opts.diskusage:
                        data["diskusage"]=diskUsage(cName)
                        totalDiskusage+=data["diskusage"]
                    if self.opts.parallel:
                        for f in listdir(cName):
                            if re.compile("processor[0-9]+").match(f):
                                data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])

                    if self.opts.state or self.opts.estimateEndTime:
                        try:
                            data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                        except ValueError:
                            data["startedAt"]="nix"

                    if self.opts.state:
                        try:
                            data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                        except ValueError:
                            data["nowTime"]=None
                        try:
                            data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                        except ValueError:
                            data["lastOutput"]="nix"
                        data["state"]=self.readState(sol,"TheState")
                        if data["state"]=="Running":
                            try:
                                gone=time.time()-data["lastOutput"]
                                if gone>self.opts.deadThreshold:
                                    data["state"]="Dead "+humanReadableDuration(gone)
                            except KeyError:
                                pass
                            except TypeError:
                                pass

                    if self.opts.startEndTime or self.opts.estimateEndTime:
                        try:
                            ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                        except PyFoamParserError:
                            # Didn't work with macro expansion. Let's try without
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            except PyFoamParserError:
                                ctrlDict=None
                        if ctrlDict:
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]
                        else:
                            data["startTime"]=None
                            data["endTime"]=None

                    if self.opts.estimateEndTime:
                        data["endTimeEstimate"]=None
                        if self.readState(sol,"TheState")=="Running":
                            gone=time.time()-data["startedAt"]
                            try:
                                current=float(self.readState(sol,"CurrentTime"))
                                frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                            except ValueError:
                                frac=0
                            if frac>0:
                                data["endTimeEstimate"]=data["startedAt"]+gone/frac

                    if self.opts.hgInfo:
                        if path.isdir(path.join(cName,".hg")):
                            from stat import ST_ATIME
                            prevStat=stat(cName)
                            try:
                                data["hgInfo"]=sub.Popen(["hg","id","-R",cName,"-b","-n","-i"],
                                                         stdout=sub.PIPE).communicate()[0].strip()
                            except OSError:
                                data["hgInfo"]="<hg not working>"
                            postStat=stat(cName)
                            if prevStat[ST_MTIME]!=postStat[ST_MTIME]:
                                # hg seems to modify the modification time of the directory. So reset it
                                os.utime(cName,(postStat[ST_ATIME],prevStat[ST_MTIME]))
                        else:
                            data["hgInfo"]="<no .hg directory>"

                    if len(customData)>0 or self.opts.hostname:
                        fn=None
                        pickleFile=None
                        if useSolverInData:
                            data["solver"]="none found"
                            # try to find the oldest pickled file
                            dirAndTime=[]
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                for g in glob(path.join(cName,"*.analyzed")):
                                    pName=path.join(g,f)
                                    base=path.basename(g)
                                    if base.find("PyFoamRunner.")==0:
                                        solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                    else:
                                        solverName=None
                                    if path.exists(pName):
                                        dirAndTime.append((path.getmtime(pName),solverName,pName))
                            dirAndTime.sort(key=lambda x:x[0])
                            if len(dirAndTime)>0:
                                data["solver"]=dirAndTime[-1][1]
                                pickleFile=dirAndTime[-1][2]
                            solverName=data["solver"]
                        else:
                            solverName=self.opts.solverNameForCustom
                        if pickleFile:
                            fn=pickleFile
                        else:
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                if path.exists(fp):
                                    fn=fp
                                    break
                        pickleOK=False
                        if fn:
                            try:
                                raw=pickle.Unpickler(open(fn,"rb")).load()
                                pickleOK=True
                                for n,spec in customData:
                                    dt=raw
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"]=raw["hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"]="<unspecified>"
                            except ValueError:
                                pass
                        if not pickleOK:
                            for n,spec in customData:
                                data[n]="<no file>"
                            if self.opts.hostname:
                                data["hostname"]="<no file>"

                    cData.append(data)
                elif self.opts.recursive:
                    # print("Recurse",cName)
                    lookForCases(cName)
            except OSError:
                print_(cName,"is unreadable")
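
# The custom-data handling above resolves a spec such as "peak=analyzed::maxValue"
# by splitting on "::" and walking the pickled dictionary key by key, recording
# "No key '...'" when a level is missing and stopping early once a plain string
# is reached. A self-contained sketch of just that traversal (the sample
# dictionary and spec are invented for illustration):
def resolveCustomSpec(raw, spec):
    dt = raw
    for k in spec:
        try:
            dt = dt[k]
        except KeyError:
            return "No key '" + k + "'"
        if isinstance(dt, str):
            break  # the code above also stops once a plain string is reached
    return dt

sampleRaw = {"analyzed": {"maxValue": 42.0}, "hostname": "node01.cluster"}
assert resolveCustomSpec(sampleRaw, "analyzed::maxValue".split("::")) == 42.0
assert resolveCustomSpec(sampleRaw, "analyzed::missing".split("::")).startswith("No key")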
def run(self):
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]

    cData=[]
    totalDiskusage=0
    useSolverInData=False

    self.hasState=False

    customData=[]
    for i,c in enumerate(self.opts.customData):
        lst=c.split("=")
        if len(lst)==2:
            name,spec=lst
            name+="_"  # Make sure that there is no collision with standard-names
        elif len(lst)==1:
            name,spec="Custom%d" % (i+1),c
        else:
            self.error("Custom specification",c,"does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name,spec.split("::")))

    if len(customData)>0 and not self.opts.solverNameForCustom:
        self.warning("Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used")
        useSolverInData=True

    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)

                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            data["diskusage"]=diskUsage(cName)
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])

                        if self.opts.state:
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")

                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"

                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with macro expansion. Let's try without
                                try:
                                    ctrlDict=ParsedParameterFile(sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict=None
                            if ctrlDict:
                                data["startTime"]=ctrlDict["startTime"]
                                data["endTime"]=ctrlDict["endTime"]
                            else:
                                data["startTime"]=None
                                data["endTime"]=None

                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"]=None
                            if self.readState(sol,"TheState")=="Running":
                                gone=time.time()-data["startedAt"]
                                try:
                                    current=float(self.readState(sol,"CurrentTime"))
                                    frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                except ValueError:
                                    frac=0
                                if frac>0:
                                    data["endTimeEstimate"]=data["startedAt"]+gone/frac

                        if len(customData)>0:
                            fn=None
                            pickleFile=None
                            if useSolverInData:
                                data["solver"]="none found"
                                # try to find the oldest pickled file
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    dirAndTime=[]
                                    for g in glob(path.join(cName,"*.analyzed")):
                                        pName=path.join(g,f)
                                        base=path.basename(g)
                                        if base.find("PyFoamRunner.")==0:
                                            solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                        else:
                                            solverName=None
                                        if path.exists(pName):
                                            dirAndTime.append((path.getmtime(pName),solverName,pName))
                                    dirAndTime.sort(key=lambda x:x[0])
                                    if len(dirAndTime)>0:
                                        data["solver"]=dirAndTime[-1][1]
                                        pickleFile=dirAndTime[-1][2]
                                        break
                                solverName=data["solver"]
                            else:
                                solverName=self.opts.solverNameForCustom
                            if pickleFile:
                                fn=pickleFile
                            else:
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                    if path.exists(fp):
                                        fn=fp
                                        break
                            if fn:
                                raw=pickle.Unpickler(open(fn)).load()
                                for n,spec in customData:
                                    dt=raw
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                            else:
                                for n,spec in customData:
                                    data[n]="no file"

                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")

    if self.opts.progress:
        print_("Sorting data")

    cData.sort(key=lambda x:x[self.opts.sort],reverse=self.opts.reverse)

    if len(cData)==0:
        print_("No cases found")
        return

    if self.opts.dump:
        print_(cData)
        return

    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k]!=None:
                    if self.opts.relativeTime:
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None

        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass

        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))

    format=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    if useSolverInData:
        spec+=["solver"," | "]
    for n,s in customData:
        spec+=[n," | "]
    spec+=["name"]

    for i,l in enumerate(spec):
        if not l in list(cData[0].keys()):
            format+=l
        else:
            if i<len(spec)-1:
                format+="%%(%s)%ds" % (l,lens[l])
            else:
                format+="%%(%s)s" % (l)

    if self.opts.progress:
        print_("Printing\n\n")

    header=format % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))

    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(format % d)

    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]

    cData=[]
    totalDiskusage=0

    self.hasState=False

    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)

                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            try:
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sb",cName],
                                        stdout=subprocess.PIPE,
                                        stderr=open(os.devnull,"w")
                                    ).communicate()[0].split()[0])
                            except IndexError:
                                # assume that this du does not support -b
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sk",cName],
                                        stdout=subprocess.PIPE
                                    ).communicate()[0].split()[0])*1024
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])

                        if self.opts.state:
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")

                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"

                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with macro expansion. Let's try without
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]

                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"]=None
                            if self.readState(sol,"TheState")=="Running":
                                gone=time.time()-data["startedAt"]
                                try:
                                    current=float(self.readState(sol,"CurrentTime"))
                                    frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                except ValueError:
                                    frac=0
                                if frac>0:
                                    data["endTimeEstimate"]=data["startedAt"]+gone/frac

                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")

    if self.opts.progress:
        print_("Sorting data")

    if self.opts.reverse:
        cData.sort(lambda x,y:cmp(y[self.opts.sort],x[self.opts.sort]))
    else:
        cData.sort(lambda x,y:cmp(x[self.opts.sort],y[self.opts.sort]))

    if len(cData)==0:
        print_("No cases found")
        return

    if self.opts.dump:
        print_(cData)
        return

    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k]!=None:
                    if self.opts.relativeTime:
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None

        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass

        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))

    format=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    spec+=["name"]

    for i,l in enumerate(spec):
        if not l in list(cData[0].keys()):
            format+=l
        else:
            if i<len(spec)-1:
                format+="%%(%s)%ds" % (l,lens[l])
            else:
                format+="%%(%s)s" % (l)

    if self.opts.progress:
        print_("Printing\n\n")

    header=format % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))

    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(format % d)

    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    dirs = self.parser.getArgs()
    if len(dirs) == 0:
        dirs = [path.curdir]

    cData = []
    totalDiskusage = 0
    useSolverInData = False

    self.hasState = False

    customData = []
    for i, c in enumerate(self.opts.customData):
        lst = c.split("=")
        if len(lst) == 2:
            name, spec = lst
            name += "_"  # Make sure that there is no collision with standard-names
        elif len(lst) == 1:
            name, spec = "Custom%d" % (i + 1), c
        else:
            self.error("Custom specification", c,
                       "does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name, spec.split("::")))

    if len(customData) > 0 and not self.opts.solverNameForCustom:
        self.warning(
            "Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used")
        useSolverInData = True
    elif self.opts.hostname:
        useSolverInData = True

    for d in tqdm(dirs,
                  unit="dirs",
                  disable=not self.opts.progressBar or len(dirs) < 2):
        if not path.isdir(d):
            self.warning("There is no directory", d, "here")
            continue
        for n in tqdm(listdir(d),
                      unit="entries",
                      desc=path.basename(path.abspath(d)),
                      disable=not self.opts.progressBar):
            if not self.fnmatch(n):
                continue
            cName = path.join(d, n)
            if path.isdir(cName):
                try:
                    sol = SolutionDirectory(cName,
                                            archive=None,
                                            paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing", cName)

                        data = {}
                        data["mtime"] = stat(cName)[ST_MTIME]
                        times = sol.getTimes()
                        try:
                            data["first"] = times[0]
                        except IndexError:
                            data["first"] = "None"
                        try:
                            data["last"] = times[-1]
                        except IndexError:
                            data["last"] = "None"
                        data["nrSteps"] = len(times)
                        data["procs"] = sol.nrProcs()
                        data["pFirst"] = -1
                        data["pLast"] = -1
                        data["nrParallel"] = -1
                        if self.opts.parallel:
                            pTimes = sol.getParallelTimes()
                            data["nrParallel"] = len(pTimes)
                            if len(pTimes) > 0:
                                data["pFirst"] = pTimes[0]
                                data["pLast"] = pTimes[-1]
                        data["name"] = cName
                        data["diskusage"] = -1
                        if self.opts.diskusage:
                            data["diskusage"] = diskUsage(cName)
                            totalDiskusage += data["diskusage"]
                        if self.opts.parallel:
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"] = max(
                                        stat(path.join(cName, f))[ST_MTIME],
                                        data["mtime"])

                        if self.opts.state:
                            try:
                                data["nowTime"] = float(
                                    self.readState(sol, "CurrentTime"))
                            except ValueError:
                                data["nowTime"] = None
                            try:
                                data["lastOutput"] = time.mktime(
                                    time.strptime(
                                        self.readState(sol, "LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"] = "nix"
                            data["state"] = self.readState(sol, "TheState")

                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"] = time.mktime(
                                    time.strptime(
                                        self.readState(sol, "StartedAt")))
                            except ValueError:
                                data["startedAt"] = "nix"

                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict = ParsedParameterFile(
                                    sol.controlDict(), doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with macro expansion. Let's try without
                                try:
                                    ctrlDict = ParsedParameterFile(
                                        sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict = None
                            if ctrlDict:
                                data["startTime"] = ctrlDict["startTime"]
                                data["endTime"] = ctrlDict["endTime"]
                            else:
                                data["startTime"] = None
                                data["endTime"] = None

                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"] = None
                            if self.readState(sol, "TheState") == "Running":
                                gone = time.time() - data["startedAt"]
                                try:
                                    current = float(
                                        self.readState(sol, "CurrentTime"))
                                    frac = (current - data["startTime"]) / (
                                        data["endTime"] - data["startTime"])
                                except ValueError:
                                    frac = 0
                                if frac > 0:
                                    data["endTimeEstimate"] = data["startedAt"] + gone / frac

                        if self.opts.hgInfo:
                            if path.isdir(path.join(cName, ".hg")):
                                from stat import ST_ATIME
                                prevStat = stat(cName)
                                try:
                                    data["hgInfo"] = sub.Popen(
                                        ["hg", "id", "-R", cName, "-b", "-n", "-i"],
                                        stdout=sub.PIPE).communicate()[0].strip()
                                except OSError:
                                    data["hgInfo"] = "<hg not working>"
                                postStat = stat(cName)
                                if prevStat[ST_MTIME] != postStat[ST_MTIME]:
                                    # hg seems to modify the modification time of the directory. So reset it
                                    os.utime(cName,
                                             (postStat[ST_ATIME], prevStat[ST_MTIME]))
                            else:
                                data["hgInfo"] = "<no .hg directory>"

                        if len(customData) > 0 or self.opts.hostname:
                            fn = None
                            pickleFile = None
                            if useSolverInData:
                                data["solver"] = "none found"
                                # try to find the oldest pickled file
                                for f in ["pickledData",
                                          "pickledUnfinishedData",
                                          "pickledStartData"]:
                                    dirAndTime = []
                                    for g in glob(path.join(cName, "*.analyzed")):
                                        pName = path.join(g, f)
                                        base = path.basename(g)
                                        if base.find("PyFoamRunner.") == 0:
                                            solverName = base[len("PyFoamRunner."):-len(".analyzed")]
                                        else:
                                            solverName = None
                                        if path.exists(pName):
                                            dirAndTime.append(
                                                (path.getmtime(pName), solverName, pName))
                                    dirAndTime.sort(key=lambda x: x[0])
                                    if len(dirAndTime) > 0:
                                        data["solver"] = dirAndTime[-1][1]
                                        pickleFile = dirAndTime[-1][2]
                                        break
                                solverName = data["solver"]
                            else:
                                solverName = self.opts.solverNameForCustom
                            if pickleFile:
                                fn = pickleFile
                            else:
                                for f in ["pickledData",
                                          "pickledUnfinishedData",
                                          "pickledStartData"]:
                                    fp = path.join(
                                        cName,
                                        "PyFoamRunner." + solverName + ".analyzed",
                                        f)
                                    if path.exists(fp):
                                        fn = fp
                                        break
                            if fn:
                                raw = pickle.Unpickler(open(fn, "rb")).load()
                                for n, spec in customData:
                                    dt = raw
                                    for k in spec:
                                        try:
                                            dt = dt[k]
                                        except KeyError:
                                            dt = "No key '" + k + "'"
                                            break
                                        if isinstance(dt, string_types):
                                            break
                                    data[n] = dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"] = raw["hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"] = "<unspecified>"
                            else:
                                for n, spec in customData:
                                    data[n] = "<no file>"
                                if self.opts.hostname:
                                    data["hostname"] = "<no file>"

                        cData.append(data)
                except OSError:
                    print_(cName, "is unreadable")

    if self.opts.progress:
        print_("Sorting data")

    cData.sort(key=lambda x: x[self.opts.sort], reverse=self.opts.reverse)

    if len(cData) == 0:
        print_("No cases found")
        return

    if self.opts.dump:
        print_(cData)
        return

    lens = {}
    for k in list(cData[0].keys()):
        lens[k] = len(k)
    for c in cData:
        for k in ["mtime", "lastOutput", "startedAt", "endTimeEstimate"]:
            try:
                if c[k] != None:
                    if self.opts.relativeTime:
                        c[k] = datetime.timedelta(seconds=long(time.time() - c[k]))
                    else:
                        c[k] = time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k] = None

        try:
            c["diskusage"] = humanReadableSize(c["diskusage"])
        except KeyError:
            pass

        for k, v in iteritems(c):
            lens[k] = max(lens[k], len(str(v)))

    format = ""
    spec = ["mtime", " | "]
    if self.opts.hostname:
        spec += ["hostname", " | "]
    spec += ["first", " - ", "last", " (", "nrSteps", ") "]
    if self.opts.parallel:
        spec += ["| ", "procs", " : ", "pFirst", " - ", "pLast",
                 " (", "nrParallel", ") | "]
    if self.opts.diskusage:
        spec += ["diskusage", " | "]
    if self.hasState:
        spec += ["nowTime", " s ", "state", " | "]
        if self.opts.advancedState:
            spec += ["lastOutput", " | ", "startedAt", " | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec += ["startedAt", " | "]
        spec += ["endTimeEstimate", " | "]
    if self.opts.startEndTime:
        spec += ["startTime", " | ", "endTime", " | "]
    if useSolverInData:
        spec += ["solver", " | "]
    for n, s in customData:
        spec += [n, " | "]
    if self.opts.hgInfo:
        spec += ["hgInfo", " | "]
    spec += ["name"]

    for i, l in enumerate(spec):
        if not l in list(cData[0].keys()):
            format += l
        else:
            if i < len(spec) - 1:
                format += "%%(%s)%ds" % (l, lens[l])
            else:
                format += "%%(%s)s" % (l)

    if self.opts.progress:
        print_("Printing\n\n")

    header = format % dict(
        list(zip(list(cData[0].keys()), list(cData[0].keys()))))
    print_(header)
    print_("-" * len(header))

    for d in cData:
        for k in list(d.keys()):
            d[k] = str(d[k])
        print_(format % d)

    if self.opts.diskusage:
        print_("Total disk-usage:", humanReadableSize(totalDiskusage))
def getValidSolutionDirectory(_caseDir):
    if path.isdir(path.abspath(_caseDir)):
        sol = SolutionDirectory(_caseDir)
        if sol.isValid():
            return sol
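
# Usage sketch for the helper above: collect every valid OpenFOAM case directly
# below a directory. "listdir" and "path" are assumed to be the os.listdir and
# os.path already used by the other functions in this file; the default
# directory is only an example.
def findCasesBelow(dirName=path.curdir):
    cases = []
    for entry in listdir(dirName):
        sol = getValidSolutionDirectory(path.join(dirName, entry))
        if sol is not None:
            cases.append(sol)
    return cases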
def run(self):
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]

    cData=[]
    totalDiskusage=0

    self.hasState=False

    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print "Processing",cName

                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            data["diskusage"]=int(subprocess.Popen(["du","-sm",cName],
                                                                   stdout=subprocess.PIPE).communicate()[0].split()[0])
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])

                        if self.opts.state:
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"
                            data["state"]=self.readState(sol,"TheState")

                        cData.append(data)
                except OSError:
                    print cName,"is unreadable"

    if self.opts.progress:
        print "Sorting data"

    if self.opts.reverse:
        cData.sort(lambda x,y:cmp(y[self.opts.sort],x[self.opts.sort]))
    else:
        cData.sort(lambda x,y:cmp(x[self.opts.sort],y[self.opts.sort]))

    if len(cData)==0:
        print "No cases found"
        return

    if self.opts.dump:
        print cData
        return

    lens={}
    for k in cData[0].keys():
        lens[k]=len(k)
    for c in cData:
        for k in ["mtime","lastOutput","startedAt"]:
            try:
                if self.opts.relativeTime:
                    c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                else:
                    c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None

        for k,v in c.iteritems():
            lens[k]=max(lens[k],len(str(v)))

    format=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," MB "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    spec+=["name"]

    for i,l in enumerate(spec):
        if not l in cData[0].keys():
            format+=l
        else:
            if i<len(spec)-1:
                format+="%%(%s)%ds" % (l,lens[l])
            else:
                format+="%%(%s)s" % (l)

    if self.opts.progress:
        print "Printing\n\n"

    header=format % dict(zip(cData[0].keys(),cData[0].keys()))
    print header
    print "-"*len(header)

    for d in cData:
        for k in d.keys():
            d[k]=str(d[k])
        print format % d

    if self.opts.diskusage:
        print "Total disk-usage:",totalDiskusage,"MB"
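
# Note on the sort in the Python 2 run() above: the cmp-based list.sort(callable)
# form is Python 2 only. The later revisions of this method in this file already
# use the key/reverse form; a minimal standalone equivalent with invented data:
def _sortDemo():
    caseList = [{"mtime": 3}, {"mtime": 1}, {"mtime": 2}]
    caseList.sort(key=lambda x: x["mtime"], reverse=True)  # descending, like the cmp(y, x) branch
    return caseList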