def lookForCases(d):
    # Scan directory *d* for OpenFOAM case directories and append one
    # ``data`` dict per valid case to the enclosing ``cData`` list.
    #
    # NOTE(review): this function closes over names from an enclosing scope
    # (``self``, ``cData``, ``customData``, ``useSolverInData``,
    # ``totalDiskusage``) -- presumably it is nested inside a method.
    # ``totalDiskusage += ...`` below REBINDS that name, which only works
    # with a ``nonlocal``/``global`` declaration in the enclosing code;
    # TODO confirm against the surrounding definition.
    for n in tqdm(listdir(d),
                  unit="entries",
                  leave=False,
                  desc=path.basename(path.abspath(d)),
                  disable=not self.opts.progressBar):
        # Skip entries that do not match the user-supplied name pattern
        if not self.fnmatch(n):
            continue
        cName = path.join(d, n)
        if path.isdir(cName):
            try:
                sol = SolutionDirectory(cName,
                                        archive=None,
                                        paraviewLink=False)
                if sol.isValid():
                    if self.opts.progress:
                        print_("Processing", cName)
                    data = {}
                    data["mtime"] = stat(cName)[ST_MTIME]
                    times = sol.getTimes()
                    # First/last written time step; the string "None" marks
                    # a case without any time directories
                    try:
                        data["first"] = times[0]
                    except IndexError:
                        data["first"] = "None"
                    try:
                        data["last"] = times[-1]
                    except IndexError:
                        data["last"] = "None"
                    data["nrSteps"] = len(times)
                    data["procs"] = sol.nrProcs()
                    # -1 marks "not determined" for the parallel columns
                    data["pFirst"] = -1
                    data["pLast"] = -1
                    data["nrParallel"] = -1
                    if self.opts.parallel:
                        pTimes = sol.getParallelTimes()
                        data["nrParallel"] = len(pTimes)
                        if len(pTimes) > 0:
                            data["pFirst"] = pTimes[0]
                            data["pLast"] = pTimes[-1]
                    data["name"] = cName
                    data["diskusage"] = -1
                    if self.opts.diskusage:
                        data["diskusage"] = diskUsage(cName)
                        totalDiskusage += data["diskusage"]
                    if self.opts.parallel:
                        # Take the newest processor* directory into account
                        # for the modification time of a decomposed case
                        for f in listdir(cName):
                            if re.compile("processor[0-9]+").match(f):
                                data["mtime"] = max(
                                    stat(path.join(cName, f))[ST_MTIME],
                                    data["mtime"])
                    if self.opts.state or self.opts.estimateEndTime:
                        # readState() presumably reads a persisted state
                        # file of the case -- "nix" marks an unparsable value
                        try:
                            data["startedAt"] = time.mktime(
                                time.strptime(
                                    self.readState(sol, "StartedAt")))
                        except ValueError:
                            data["startedAt"] = "nix"
                    if self.opts.state:
                        try:
                            data["nowTime"] = float(
                                self.readState(sol, "CurrentTime"))
                        except ValueError:
                            data["nowTime"] = None
                        try:
                            data["lastOutput"] = time.mktime(
                                time.strptime(
                                    self.readState(
                                        sol, "LastOutputSeen")))
                        except ValueError:
                            data["lastOutput"] = "nix"
                        data["state"] = self.readState(sol, "TheState")
                        if data["state"] == "Running":
                            # Reclassify as dead if no output was seen for
                            # longer than the configured threshold
                            try:
                                gone = time.time() - data["lastOutput"]
                                if gone > self.opts.deadThreshold:
                                    data[
                                        "state"] = "Dead " + humanReadableDuration(
                                            gone)
                            except KeyError:
                                pass
                            except TypeError:
                                # lastOutput may be the string "nix"
                                pass
                    if self.opts.startEndTime or self.opts.estimateEndTime:
                        try:
                            ctrlDict = ParsedParameterFile(
                                sol.controlDict(),
                                doMacroExpansion=True)
                        except PyFoamParserError:
                            # Didn't work with Macro expansion. Let's try without
                            try:
                                ctrlDict = ParsedParameterFile(
                                    sol.controlDict())
                            except PyFoamParserError:
                                ctrlDict = None
                        if ctrlDict:
                            data["startTime"] = ctrlDict["startTime"]
                            data["endTime"] = ctrlDict["endTime"]
                        else:
                            data["startTime"] = None
                            data["endTime"] = None
                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"] = None
                            if self.readState(sol, "TheState") == "Running":
                                # Linear extrapolation of the wall-clock end
                                # time from the simulated-time fraction done
                                gone = time.time() - data["startedAt"]
                                try:
                                    current = float(
                                        self.readState(sol, "CurrentTime"))
                                    frac = (current - data["startTime"]
                                            ) / (data["endTime"] -
                                                 data["startTime"])
                                except ValueError:
                                    frac = 0
                                if frac > 0:
                                    data["endTimeEstimate"] = data[
                                        "startedAt"] + gone / frac
                    if self.opts.hgInfo:
                        # Optionally record mercurial id/branch/revision
                        if path.isdir(path.join(cName, ".hg")):
                            from stat import ST_ATIME
                            prevStat = stat(cName)
                            try:
                                data["hgInfo"] = sub.Popen(
                                    [
                                        "hg", "id", "-R", cName, "-b", "-n",
                                        "-i"
                                    ],
                                    stdout=sub.PIPE).communicate(
                                    )[0].strip()
                            except OSError:
                                data["hgInfo"] = "<hg not working>"
                            postStat = stat(cName)
                            if prevStat[ST_MTIME] != postStat[ST_MTIME]:
                                # hg seems to modify the modification time of the directory. So reset it
                                os.utime(cName, (postStat[ST_ATIME],
                                                 prevStat[ST_MTIME]))
                        else:
                            data["hgInfo"] = "<no .hg directory>"
                    if len(customData) > 0 or self.opts.hostname:
                        fn = None
                        pickleFile = None
                        if useSolverInData:
                            data["solver"] = "none found"
                            # try to find the oldest pickled file
                            dirAndTime = []
                            for f in [
                                    "pickledData", "pickledUnfinishedData",
                                    "pickledStartData"
                            ]:
                                for g in glob(
                                        path.join(cName, "*.analyzed")):
                                    pName = path.join(g, f)
                                    base = path.basename(g)
                                    # Solver name is encoded in the
                                    # "PyFoamRunner.<solver>.analyzed" dir
                                    if base.find("PyFoamRunner.") == 0:
                                        solverName = base[
                                            len("PyFoamRunner."
                                                ):-len(".analyzed")]
                                    else:
                                        solverName = None
                                    if path.exists(pName):
                                        dirAndTime.append(
                                            (path.getmtime(pName),
                                             solverName, pName))
                            # Newest pickle file wins (last after sort)
                            dirAndTime.sort(key=lambda x: x[0])
                            if len(dirAndTime) > 0:
                                data["solver"] = dirAndTime[-1][1]
                                pickleFile = dirAndTime[-1][2]
                            solverName = data["solver"]
                        else:
                            solverName = self.opts.solverNameForCustom
                        if pickleFile:
                            fn = pickleFile
                        else:
                            for f in [
                                    "pickledData", "pickledUnfinishedData",
                                    "pickledStartData"
                            ]:
                                fp = path.join(
                                    cName,
                                    "PyFoamRunner." + solverName +
                                    ".analyzed", f)
                                if path.exists(fp):
                                    fn = fp
                                    break
                        pickleOK = False
                        if fn:
                            try:
                                raw = pickle.Unpickler(open(
                                    fn, "rb")).load()
                                pickleOK = True
                                # NOTE(review): loop variable ``n`` shadows
                                # the directory-entry ``n`` of the outer
                                # for-loop (harmless here, but confusing)
                                for n, spec in customData:
                                    dt = raw
                                    # Walk the "a::b::c" key path into the
                                    # pickled data structure
                                    for k in spec:
                                        try:
                                            dt = dt[k]
                                        except KeyError:
                                            dt = "No key '" + k + "'"
                                            break
                                        if isinstance(
                                                dt, string_types):
                                            break
                                    data[n] = dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"] = raw[
                                            "hostname"].split(".")[0]
                                    except KeyError:
                                        data[
                                            "hostname"] = "<unspecified>"
                            except ValueError:
                                # Unreadable/corrupt pickle: fall through to
                                # the "<no file>" placeholders below
                                pass
                        if not pickleOK:
                            for n, spec in customData:
                                data[n] = "<no file>"
                            if self.opts.hostname:
                                data["hostname"] = "<no file>"
                    cData.append(data)
                elif self.opts.recursive:
                    # print("Recurse",cName)
                    lookForCases(cName)
            except OSError:
                print_(cName, "is unreadable")
def lookForCases(d):
    # Recursively scan *d* for OpenFOAM case directories and collect one
    # ``data`` dict per valid case into the enclosing ``cData`` list.
    # (Duplicate of the variant above, only formatted differently in the
    # original file.)
    #
    # NOTE(review): closes over ``self``, ``cData``, ``customData``,
    # ``useSolverInData`` and ``totalDiskusage`` from an enclosing scope;
    # ``totalDiskusage+=`` rebinds that name and needs ``nonlocal`` in the
    # enclosing definition to work -- TODO confirm.
    for n in tqdm(listdir(d),
                  unit="entries",
                  leave=False,
                  desc=path.basename(path.abspath(d)),
                  disable=not self.opts.progressBar):
        # Honour the user-supplied name pattern
        if not self.fnmatch(n):
            continue
        cName=path.join(d,n)
        if path.isdir(cName):
            try:
                sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                if sol.isValid():
                    if self.opts.progress:
                        print_("Processing",cName)
                    data={}
                    data["mtime"]=stat(cName)[ST_MTIME]
                    times=sol.getTimes()
                    # First/last time step; "None" if no time dirs exist
                    try:
                        data["first"]=times[0]
                    except IndexError:
                        data["first"]="None"
                    try:
                        data["last"]=times[-1]
                    except IndexError:
                        data["last"]="None"
                    data["nrSteps"]=len(times)
                    data["procs"]=sol.nrProcs()
                    # -1 == "not determined" for the parallel columns
                    data["pFirst"]=-1
                    data["pLast"]=-1
                    data["nrParallel"]=-1
                    if self.opts.parallel:
                        pTimes=sol.getParallelTimes()
                        data["nrParallel"]=len(pTimes)
                        if len(pTimes)>0:
                            data["pFirst"]=pTimes[0]
                            data["pLast"]=pTimes[-1]
                    data["name"]=cName
                    data["diskusage"]=-1
                    if self.opts.diskusage:
                        data["diskusage"]=diskUsage(cName)
                        totalDiskusage+=data["diskusage"]
                    if self.opts.parallel:
                        # Newest processor* directory counts for the mtime
                        for f in listdir(cName):
                            if re.compile("processor[0-9]+").match(f):
                                data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                    if self.opts.state or self.opts.estimateEndTime:
                        # "nix" marks a value that could not be parsed
                        try:
                            data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                        except ValueError:
                            data["startedAt"]="nix"
                    if self.opts.state:
                        try:
                            data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                        except ValueError:
                            data["nowTime"]=None
                        try:
                            data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                        except ValueError:
                            data["lastOutput"]="nix"
                        data["state"]=self.readState(sol,"TheState")
                        if data["state"]=="Running":
                            # Mark as dead when silent for longer than the
                            # configured threshold
                            try:
                                gone=time.time()-data["lastOutput"]
                                if gone>self.opts.deadThreshold:
                                    data["state"]="Dead "+humanReadableDuration(gone)
                            except KeyError:
                                pass
                            except TypeError:
                                # lastOutput may be the string "nix"
                                pass
                    if self.opts.startEndTime or self.opts.estimateEndTime:
                        try:
                            ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                        except PyFoamParserError:
                            # Didn't work with Macro expansion. Let's try without
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            except PyFoamParserError:
                                ctrlDict=None
                        if ctrlDict:
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]
                        else:
                            data["startTime"]=None
                            data["endTime"]=None
                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"]=None
                            if self.readState(sol,"TheState")=="Running":
                                # Extrapolate wall-clock end time from the
                                # fraction of simulated time already done
                                gone=time.time()-data["startedAt"]
                                try:
                                    current=float(self.readState(sol,"CurrentTime"))
                                    frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                except ValueError:
                                    frac=0
                                if frac>0:
                                    data["endTimeEstimate"]=data["startedAt"]+gone/frac
                    if self.opts.hgInfo:
                        # Optionally record mercurial id/branch/revision
                        if path.isdir(path.join(cName,".hg")):
                            from stat import ST_ATIME
                            prevStat=stat(cName)
                            try:
                                data["hgInfo"]=sub.Popen(["hg", "id", "-R",cName, "-b","-n","-i"], stdout=sub.PIPE).communicate()[0].strip()
                            except OSError:
                                data["hgInfo"]="<hg not working>"
                            postStat=stat(cName)
                            if prevStat[ST_MTIME]!=postStat[ST_MTIME]:
                                # hg seems to modify the modification time of the directory. So reset it
                                os.utime(cName,(postStat[ST_ATIME],prevStat[ST_MTIME]))
                        else:
                            data["hgInfo"]="<no .hg directory>"
                    if len(customData)>0 or self.opts.hostname:
                        fn=None
                        pickleFile=None
                        if useSolverInData:
                            data["solver"]="none found"
                            # try to find the oldest pickled file
                            dirAndTime=[]
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                for g in glob(path.join(cName,"*.analyzed")):
                                    pName=path.join(g,f)
                                    base=path.basename(g)
                                    # Solver name is encoded in
                                    # "PyFoamRunner.<solver>.analyzed"
                                    if base.find("PyFoamRunner.")==0:
                                        solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                    else:
                                        solverName=None
                                    if path.exists(pName):
                                        dirAndTime.append((path.getmtime(pName),solverName,pName))
                            # Newest pickle file wins (last after sort)
                            dirAndTime.sort(key=lambda x:x[0])
                            if len(dirAndTime)>0:
                                data["solver"]=dirAndTime[-1][1]
                                pickleFile=dirAndTime[-1][2]
                            solverName=data["solver"]
                        else:
                            solverName=self.opts.solverNameForCustom
                        if pickleFile:
                            fn=pickleFile
                        else:
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                if path.exists(fp):
                                    fn=fp
                                    break
                        pickleOK=False
                        if fn:
                            try:
                                raw=pickle.Unpickler(open(fn,"rb")).load()
                                pickleOK=True
                                # NOTE(review): ``n`` shadows the outer
                                # directory-entry loop variable
                                for n,spec in customData:
                                    dt=raw
                                    # Walk the "a::b::c" key path
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"]=raw["hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"]="<unspecified>"
                            except ValueError:
                                # Corrupt pickle: use placeholders below
                                pass
                        if not pickleOK:
                            for n,spec in customData:
                                data[n]="<no file>"
                            if self.opts.hostname:
                                data["hostname"]="<no file>"
                    cData.append(data)
                elif self.opts.recursive:
                    # print("Recurse",cName)
                    lookForCases(cName)
            except OSError:
                print_(cName,"is unreadable")
def run(self):
    """Collect data about all OpenFOAM cases found in the argument
    directories (or the current directory) and print them as a sorted,
    column-aligned table.  Supports optional custom columns extracted
    from pickled analysis data via ``--custom-data``."""
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]
    cData=[]
    totalDiskusage=0
    useSolverInData=False
    # hasState is presumably flipped by readState() when state files are
    # found -- TODO confirm; only initialized here
    self.hasState=False
    # Parse the --custom-data specifications into (columnName, keyPath)
    customData=[]
    for i,c in enumerate(self.opts.customData):
        lst=c.split("=")
        if len(lst)==2:
            name,spec=lst
            name+="_" # Make sure that there is no collision with standard-names
        elif len(lst)==1:
            # Unnamed specification gets an auto-generated column name
            name,spec="Custom%d" % (i+1),c
        else:
            self.error("Custom specification",c,"does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name,spec.split("::")))
    if len(customData)>0 and not self.opts.solverNameForCustom:
        self.warning("Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used")
        # Fall back to detecting the solver name from the analyzed dirs
        useSolverInData=True
    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)
                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        # First/last time step; "None" if no time dirs
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        # -1 == "not determined" for the parallel columns
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            data["diskusage"]=diskUsage(cName)
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            # Newest processor* directory counts for mtime
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                        if self.opts.state:
                            # "nix"/None mark values that failed to parse
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                try:
                                    ctrlDict=ParsedParameterFile(sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict=None
                            if ctrlDict:
                                data["startTime"]=ctrlDict["startTime"]
                                data["endTime"]=ctrlDict["endTime"]
                            else:
                                data["startTime"]=None
                                data["endTime"]=None
                            if self.opts.estimateEndTime:
                                data["endTimeEstimate"]=None
                                if self.readState(sol,"TheState")=="Running":
                                    # Extrapolate wall-clock end time from
                                    # fraction of simulated time done
                                    gone=time.time()-data["startedAt"]
                                    try:
                                        current=float(self.readState(sol,"CurrentTime"))
                                        frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                    except ValueError:
                                        frac=0
                                    if frac>0:
                                        data["endTimeEstimate"]=data["startedAt"]+gone/frac
                        if len(customData)>0:
                            fn=None
                            pickleFile=None
                            if useSolverInData:
                                data["solver"]="none found"
                                # try to find the oldest pickled file
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    dirAndTime=[]
                                    for g in glob(path.join(cName,"*.analyzed")):
                                        pName=path.join(g,f)
                                        base=path.basename(g)
                                        # Solver name is encoded in
                                        # "PyFoamRunner.<solver>.analyzed"
                                        if base.find("PyFoamRunner.")==0:
                                            solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                        else:
                                            solverName=None
                                        if path.exists(pName):
                                            dirAndTime.append((path.getmtime(pName),solverName,pName))
                                    # Newest file of this kind wins; stop at
                                    # the first kind that yields anything
                                    dirAndTime.sort(key=lambda x:x[0])
                                    if len(dirAndTime)>0:
                                        data["solver"]=dirAndTime[-1][1]
                                        pickleFile=dirAndTime[-1][2]
                                        break
                                solverName=data["solver"]
                            else:
                                solverName=self.opts.solverNameForCustom
                            if pickleFile:
                                fn=pickleFile
                            else:
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                    if path.exists(fp):
                                        fn=fp
                                        break
                            if fn:
                                # NOTE(review): pickle file opened in TEXT
                                # mode -- on Python 3 this should be
                                # open(fn,"rb"); confirm target version
                                raw=pickle.Unpickler(open(fn)).load()
                                # NOTE(review): ``n`` shadows the outer
                                # directory-entry loop variable
                                for n,spec in customData:
                                    dt=raw
                                    # Walk the "a::b::c" key path
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                            else:
                                for n,spec in customData:
                                    data[n]="no file"
                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")
    if self.opts.progress:
        print_("Sorting data")
    cData.sort(key=lambda x:x[self.opts.sort],reverse=self.opts.reverse)
    if len(cData)==0:
        print_("No cases found")
        return
    if self.opts.dump:
        # Raw dump instead of the formatted table
        print_(cData)
        return
    # Column widths: start with the key names themselves
    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        # Convert timestamp columns to human-readable form
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k]!=None:
                    if self.opts.relativeTime:
                        # NOTE(review): ``long`` is Python-2-only; confirm
                        # a compat shim provides it on Python 3
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None
        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))
    # Build a %-format string; literal separators are interleaved with
    # column names (note: ``format`` shadows the builtin)
    format=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    if useSolverInData:
        spec+=["solver"," | "]
    for n,s in customData:
        spec+=[n," | "]
    spec+=["name"]
    for i,l in enumerate(spec):
        if not l in list(cData[0].keys()):
            # Literal separator: copied verbatim into the format
            format+=l
        else:
            # Right-aligned column; last column is unpadded
            if i<len(spec)-1:
                format+="%%(%s)%ds" % (l,lens[l])
            else:
                format+="%%(%s)s" % (l)
    if self.opts.progress:
        print_("Printing\n\n")
    # Header row: format the key names with themselves
    header=format % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))
    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(format % d)
    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    """List OpenFOAM cases in the argument directories (or the current
    directory) as a sorted, column-aligned table.

    NOTE(review): this variant uses Python-2-only constructs (``cmp``-based
    ``list.sort`` and ``long``) and shells out to ``du`` for disk usage --
    it appears to be an older revision of the neighbouring ``run``
    implementations."""
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]
    cData=[]
    totalDiskusage=0
    # hasState is presumably flipped by readState() -- TODO confirm
    self.hasState=False
    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)
                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        # First/last time step; "None" if no time dirs
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        # -1 == "not determined" for the parallel columns
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            # Disk usage in bytes via "du -sb"; GNU-only
                            try:
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sb",cName],
                                        stdout=subprocess.PIPE,
                                        stderr=open(os.devnull,"w")
                                    ).communicate()[0].split()[0])
                            except IndexError:
                                # assume that this du does not support -b
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sk",cName],
                                        stdout=subprocess.PIPE
                                    ).communicate()[0].split()[0])*1024
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            # Newest processor* directory counts for mtime
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                        if self.opts.state:
                            # "nix"/None mark values that failed to parse
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                # NOTE(review): no guard here -- a second
                                # PyFoamParserError propagates (unlike the
                                # newer variants which fall back to None)
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]
                            if self.opts.estimateEndTime:
                                data["endTimeEstimate"]=None
                                if self.readState(sol,"TheState")=="Running":
                                    # Extrapolate wall-clock end time from
                                    # fraction of simulated time done
                                    gone=time.time()-data["startedAt"]
                                    try:
                                        current=float(self.readState(sol,"CurrentTime"))
                                        frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                    except ValueError:
                                        frac=0
                                    if frac>0:
                                        data["endTimeEstimate"]=data["startedAt"]+gone/frac
                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")
    if self.opts.progress:
        print_("Sorting data")
    # NOTE(review): cmp-based sort is Python-2-only
    if self.opts.reverse:
        cData.sort(lambda x,y:cmp(y[self.opts.sort],x[self.opts.sort]))
    else:
        cData.sort(lambda x,y:cmp(x[self.opts.sort],y[self.opts.sort]))
    if len(cData)==0:
        print_("No cases found")
        return
    if self.opts.dump:
        # Raw dump instead of the formatted table
        print_(cData)
        return
    # Column widths: start with the key names themselves
    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        # Convert timestamp columns to human-readable form
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k]!=None:
                    if self.opts.relativeTime:
                        # NOTE(review): ``long`` is Python-2-only
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None
        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))
    # Build a %-format string; literal separators are interleaved with
    # column names (note: ``format`` shadows the builtin)
    format=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    spec+=["name"]
    for i,l in enumerate(spec):
        if not l in list(cData[0].keys()):
            # Literal separator: copied verbatim into the format
            format+=l
        else:
            # Right-aligned column; last column is unpadded
            if i<len(spec)-1:
                format+="%%(%s)%ds" % (l,lens[l])
            else:
                format+="%%(%s)s" % (l)
    if self.opts.progress:
        print_("Printing\n\n")
    # Header row: format the key names with themselves
    header=format % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))
    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(format % d)
    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    """List OpenFOAM cases in the argument directories (or the current
    directory) as a sorted, column-aligned table.

    This variant adds: a tqdm progress bar over directories and entries,
    name-pattern filtering via ``self.fnmatch``, an optional ``hostname``
    column read from pickled run data, and optional mercurial information
    (``--hg-info``)."""
    dirs = self.parser.getArgs()
    if len(dirs) == 0:
        dirs = [path.curdir]
    cData = []
    totalDiskusage = 0
    useSolverInData = False
    # hasState is presumably flipped by readState() -- TODO confirm
    self.hasState = False
    # Parse the --custom-data specifications into (columnName, keyPath)
    customData = []
    for i, c in enumerate(self.opts.customData):
        lst = c.split("=")
        if len(lst) == 2:
            name, spec = lst
            name += "_"  # Make sure that there is no collision with standard-names
        elif len(lst) == 1:
            # Unnamed specification gets an auto-generated column name
            name, spec = "Custom%d" % (i + 1), c
        else:
            self.error("Custom specification", c,
                       "does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name, spec.split("::")))
    if len(customData) > 0 and not self.opts.solverNameForCustom:
        self.warning(
            "Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used"
        )
        useSolverInData = True
    elif self.opts.hostname:
        # The hostname column also comes from the pickled run data, so the
        # solver-detection machinery is needed in that case too
        useSolverInData = True
    for d in tqdm(dirs,
                  unit="dirs",
                  disable=not self.opts.progressBar or len(dirs) < 2):
        if not path.isdir(d):
            self.warning("There is no directory", d, "here")
            continue
        for n in tqdm(listdir(d),
                      unit="entries",
                      desc=path.basename(path.abspath(d)),
                      disable=not self.opts.progressBar):
            # Honour the user-supplied name pattern
            if not self.fnmatch(n):
                continue
            cName = path.join(d, n)
            if path.isdir(cName):
                try:
                    sol = SolutionDirectory(cName,
                                            archive=None,
                                            paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing", cName)
                        data = {}
                        data["mtime"] = stat(cName)[ST_MTIME]
                        times = sol.getTimes()
                        # First/last time step; "None" if no time dirs
                        try:
                            data["first"] = times[0]
                        except IndexError:
                            data["first"] = "None"
                        try:
                            data["last"] = times[-1]
                        except IndexError:
                            data["last"] = "None"
                        data["nrSteps"] = len(times)
                        data["procs"] = sol.nrProcs()
                        # -1 == "not determined" for the parallel columns
                        data["pFirst"] = -1
                        data["pLast"] = -1
                        data["nrParallel"] = -1
                        if self.opts.parallel:
                            pTimes = sol.getParallelTimes()
                            data["nrParallel"] = len(pTimes)
                            if len(pTimes) > 0:
                                data["pFirst"] = pTimes[0]
                                data["pLast"] = pTimes[-1]
                        data["name"] = cName
                        data["diskusage"] = -1
                        if self.opts.diskusage:
                            data["diskusage"] = diskUsage(cName)
                            totalDiskusage += data["diskusage"]
                        if self.opts.parallel:
                            # Newest processor* directory counts for mtime
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"] = max(
                                        stat(path.join(cName, f))[ST_MTIME],
                                        data["mtime"])
                        if self.opts.state:
                            # "nix"/None mark values that failed to parse
                            try:
                                data["nowTime"] = float(
                                    self.readState(sol, "CurrentTime"))
                            except ValueError:
                                data["nowTime"] = None
                            try:
                                data["lastOutput"] = time.mktime(
                                    time.strptime(
                                        self.readState(
                                            sol, "LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"] = "nix"
                            data["state"] = self.readState(sol, "TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"] = time.mktime(
                                    time.strptime(
                                        self.readState(sol, "StartedAt")))
                            except ValueError:
                                data["startedAt"] = "nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict = ParsedParameterFile(
                                    sol.controlDict(), doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                try:
                                    ctrlDict = ParsedParameterFile(
                                        sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict = None
                            if ctrlDict:
                                data["startTime"] = ctrlDict["startTime"]
                                data["endTime"] = ctrlDict["endTime"]
                            else:
                                data["startTime"] = None
                                data["endTime"] = None
                            if self.opts.estimateEndTime:
                                data["endTimeEstimate"] = None
                                if self.readState(sol,
                                                  "TheState") == "Running":
                                    # Extrapolate wall-clock end time from
                                    # fraction of simulated time done
                                    gone = time.time() - data["startedAt"]
                                    try:
                                        current = float(
                                            self.readState(
                                                sol, "CurrentTime"))
                                        frac = (current - data["startTime"]
                                                ) / (data["endTime"] -
                                                     data["startTime"])
                                    except ValueError:
                                        frac = 0
                                    if frac > 0:
                                        data["endTimeEstimate"] = data[
                                            "startedAt"] + gone / frac
                        if self.opts.hgInfo:
                            # Optionally record mercurial id/branch/revision
                            if path.isdir(path.join(cName, ".hg")):
                                from stat import ST_ATIME
                                prevStat = stat(cName)
                                try:
                                    data["hgInfo"] = sub.Popen(
                                        [
                                            "hg", "id", "-R", cName, "-b",
                                            "-n", "-i"
                                        ],
                                        stdout=sub.PIPE).communicate(
                                        )[0].strip()
                                except OSError:
                                    data["hgInfo"] = "<hg not working>"
                                postStat = stat(cName)
                                if prevStat[ST_MTIME] != postStat[ST_MTIME]:
                                    # hg seems to modify the modification time of the directory. So reset it
                                    os.utime(cName, (postStat[ST_ATIME],
                                                     prevStat[ST_MTIME]))
                            else:
                                data["hgInfo"] = "<no .hg directory>"
                        if len(customData) > 0 or self.opts.hostname:
                            fn = None
                            pickleFile = None
                            if useSolverInData:
                                data["solver"] = "none found"
                                # try to find the oldest pickled file
                                for f in [
                                        "pickledData",
                                        "pickledUnfinishedData",
                                        "pickledStartData"
                                ]:
                                    dirAndTime = []
                                    for g in glob(
                                            path.join(cName, "*.analyzed")):
                                        pName = path.join(g, f)
                                        base = path.basename(g)
                                        # Solver name is encoded in
                                        # "PyFoamRunner.<solver>.analyzed"
                                        if base.find("PyFoamRunner.") == 0:
                                            solverName = base[
                                                len("PyFoamRunner."
                                                    ):-len(".analyzed")]
                                        else:
                                            solverName = None
                                        if path.exists(pName):
                                            dirAndTime.append(
                                                (path.getmtime(pName),
                                                 solverName, pName))
                                    # Newest file of this kind wins; stop at
                                    # the first kind that yields anything
                                    dirAndTime.sort(key=lambda x: x[0])
                                    if len(dirAndTime) > 0:
                                        data["solver"] = dirAndTime[-1][1]
                                        pickleFile = dirAndTime[-1][2]
                                        break
                                solverName = data["solver"]
                            else:
                                solverName = self.opts.solverNameForCustom
                            if pickleFile:
                                fn = pickleFile
                            else:
                                for f in [
                                        "pickledData",
                                        "pickledUnfinishedData",
                                        "pickledStartData"
                                ]:
                                    fp = path.join(
                                        cName,
                                        "PyFoamRunner." + solverName +
                                        ".analyzed", f)
                                    if path.exists(fp):
                                        fn = fp
                                        break
                            if fn:
                                raw = pickle.Unpickler(open(fn,
                                                            "rb")).load()
                                # NOTE(review): ``n`` shadows the outer
                                # directory-entry loop variable
                                for n, spec in customData:
                                    dt = raw
                                    # Walk the "a::b::c" key path
                                    for k in spec:
                                        try:
                                            dt = dt[k]
                                        except KeyError:
                                            dt = "No key '" + k + "'"
                                            break
                                        if isinstance(dt, string_types):
                                            break
                                    data[n] = dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"] = raw[
                                            "hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"] = "<unspecified>"
                            else:
                                for n, spec in customData:
                                    data[n] = "<no file>"
                                if self.opts.hostname:
                                    data["hostname"] = "<no file>"
                        cData.append(data)
                except OSError:
                    print_(cName, "is unreadable")
    if self.opts.progress:
        print_("Sorting data")
    cData.sort(key=lambda x: x[self.opts.sort], reverse=self.opts.reverse)
    if len(cData) == 0:
        print_("No cases found")
        return
    if self.opts.dump:
        # Raw dump instead of the formatted table
        print_(cData)
        return
    # Column widths: start with the key names themselves
    lens = {}
    for k in list(cData[0].keys()):
        lens[k] = len(k)
    for c in cData:
        # Convert timestamp columns to human-readable form
        for k in ["mtime", "lastOutput", "startedAt", "endTimeEstimate"]:
            try:
                if c[k] != None:
                    if self.opts.relativeTime:
                        # NOTE(review): ``long`` is Python-2-only; confirm
                        # a compat shim provides it on Python 3
                        c[k] = datetime.timedelta(
                            seconds=long(time.time() - c[k]))
                    else:
                        c[k] = time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k] = None
        try:
            c["diskusage"] = humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        for k, v in iteritems(c):
            lens[k] = max(lens[k], len(str(v)))
    # Build a %-format string; literal separators are interleaved with
    # column names (note: ``format`` shadows the builtin)
    format = ""
    spec = ["mtime", " | "]
    if self.opts.hostname:
        spec += ["hostname", " | "]
    spec += ["first", " - ", "last", " (", "nrSteps", ") "]
    if self.opts.parallel:
        spec += [
            "| ", "procs", " : ", "pFirst", " - ", "pLast", " (",
            "nrParallel", ") | "
        ]
    if self.opts.diskusage:
        spec += ["diskusage", " | "]
    if self.hasState:
        spec += ["nowTime", " s ", "state", " | "]
        if self.opts.advancedState:
            spec += ["lastOutput", " | ", "startedAt", " | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec += ["startedAt", " | "]
        spec += ["endTimeEstimate", " | "]
    if self.opts.startEndTime:
        spec += ["startTime", " | ", "endTime", " | "]
    if useSolverInData:
        spec += ["solver", " | "]
    for n, s in customData:
        spec += [n, " | "]
    if self.opts.hgInfo:
        spec += ["hgInfo", " | "]
    spec += ["name"]
    for i, l in enumerate(spec):
        if not l in list(cData[0].keys()):
            # Literal separator: copied verbatim into the format
            format += l
        else:
            # Right-aligned column; last column is unpadded
            if i < len(spec) - 1:
                format += "%%(%s)%ds" % (l, lens[l])
            else:
                format += "%%(%s)s" % (l)
    if self.opts.progress:
        print_("Printing\n\n")
    # Header row: format the key names with themselves
    header = format % dict(
        list(zip(list(cData[0].keys()), list(cData[0].keys()))))
    print_(header)
    print_("-" * len(header))
    for d in cData:
        for k in list(d.keys()):
            d[k] = str(d[k])
        print_(format % d)
    if self.opts.diskusage:
        print_("Total disk-usage:", humanReadableSize(totalDiskusage))
def run(self):
    """Drive ParaView from a saved state file to produce per-timestep
    snapshots and/or geometry exports of an OpenFOAM case.

    Reads the case directory from the command line, loads (a possibly
    rewritten copy of) the .pvsm state file, then for every selected
    time-step renders each view and writes pictures, geometry files or
    a list of the pipeline sources, depending on the options.

    NOTE(review): formatting below is reconstructed from a
    whitespace-mangled original; the statement tokens are unchanged.
    """
    # what output is produced this run
    doPic=True
    doGeom=False
    doSources=False
    if self.opts.geomType:
        if PVVersion()<(3,9):
            self.error("This paraview version does not support geometry writing")
        doGeom=True
        doPic=self.opts.pictureWithGeometry
    if len(self.opts.sources)==0:
        self.opts.sources=[""] # add empty string as token
    if self.opts.sourcesList:
        # only list the sources, no rendering at all
        doPic=False
        doGeom=False
        doSources=True
    # NOTE(review): the three options below are eval()-ed; they are
    # expected to be Python literals (dicts). eval on option strings is
    # unsafe for untrusted input
    try:
        filterColors=eval(self.opts.filterColors)
    except TypeError:
        filterColors=self.opts.filterColors
    # normalize: entries without a field name fall back to the default field
    for f in filterColors:
        c=filterColors[f]
        if type(c)==tuple:
            if not c[1]:
                filterColors[f]=(c[0],self.opts.defaultField)
        else:
            if not c:
                filterColors[f]=self.opts.defaultField
    try:
        colorRanges=eval(self.opts.colorRanges)
    except TypeError:
        colorRanges=self.opts.colorRanges
    try:
        percentileRanges=eval(self.opts.percentileRanges)
    except TypeError:
        percentileRanges=self.opts.percentileRanges
    self.say("Paraview version",PVVersion(),"FoamVersion",foamVersion())
    # if PVVersion()>=(3,6):
    #     self.warning("This is experimental because the API in Paraview>=3.6 has changed. But we'll try")
    case=path.abspath(self.parser.getArgs()[0])
    short=path.basename(case)
    # stateString becomes part of the output file names when a
    # non-default state file is used
    stateString=""
    if self.opts.state==None:
        self.opts.state=path.join(case,"default.pvsm")
    else:
        stateString="_"+path.splitext(path.basename(self.opts.state))[0]
    if not path.exists(self.opts.state):
        self.error("The state file",self.opts.state,"does not exist")
    # timeString is a %-template later filled with nr/t/view
    timeString=""
    if self.opts.casename:
        timeString+="_"+short
    timeString+="_%(nr)05d"
    if self.opts.timename:
        timeString+="_t=%(t)s"
    sol=SolutionDirectory(case,
                          paraviewLink=False,
                          archive=None)
    self.say("Opening state file",self.opts.state)
    sf=StateFile(self.opts.state)
    # decide whether the state file should read decomposed or
    # reconstructed data
    decoResult=None
    newParallelMode=None
    if self.opts.decomposeMode=="keep":
        pass
    elif self.opts.decomposeMode=="decomposed":
        decoResult=sf.setDecomposed(True)
        newParallelMode=True
    elif self.opts.decomposeMode=="reconstructed":
        decoResult=sf.setDecomposed(False)
        newParallelMode=False
    elif self.opts.decomposeMode=="auto":
        # pick whichever representation has more time-steps
        nrTimes=len(sol.getTimes())
        nrParTimes=len(sol.getParallelTimes())
        if nrTimes>nrParTimes:
            newParallelMode=False
            decoResult=sf.setDecomposed(False)
        else:
            newParallelMode=True
            decoResult=sf.setDecomposed(True)
    else:
        self.error("Setting decompose mode",self.opts.decomposeMode,"is not implemented")
    if decoResult:
        self.warning("Setting decomposed type to",self.opts.decomposeMode,":",decoResult)
    # NOTE(review): this only fires when newParallelMode is True —
    # presumably a 'is not None' was intended so that the reconstructed
    # case also resets parallelTimes; confirm against the option handling
    if newParallelMode:
        if self.opts.parallelTimes!=newParallelMode:
            self.warning("Resetting parallel mode",newParallelMode)
            self.opts.parallelTimes=newParallelMode
    times=self.processTimestepOptions(sol)
    if len(times)<1:
        self.warning("Can't continue without time-steps")
        return
    # ParaView needs a (possibly empty) .OpenFOAM pseudo-file to open the case
    dataFile=path.join(case,short+".OpenFOAM")
    createdDataFile=False
    if not path.exists(dataFile):
        self.say("Creating",dataFile)
        createdDataFile=True
        f=open(dataFile,"w")
        f.close()
    self.say("Setting data to",dataFile)
    sf.setCase(dataFile)
    # collect the replacement values for text rewriting in the state file
    values={}
    if self.opts.addPrepareCaseParameters:
        fName=path.join(case,PrepareCase.parameterOutFile)
        if path.exists(fName):
            self.say("Adding vaules from",fName)
            pf=ParsedParameterFile(fName,noHeader=True)
            values.update(pf.content)
        else:
            self.say("No file",fName)
    values.update(eval(self.opts.replacements))
    values[self.opts.casenameKey]=short
    if self.opts.listReplacements:
        # print the replacement table (key | value)
        rKeys=sorted(values.keys())
        kLen=max([len(k) for k in rKeys])
        vLen=max([len(str(values[k])) for k in rKeys])
        kFormat=" %"+str(kLen)+"s | %"+str(vLen)+"s"
        print
        print kFormat % ("Key","Value")
        print "-"*(kLen+2)+"|"+"-"*(vLen+2)
        for k in rKeys:
            print kFormat % (k,str(values[k]))
        print
    sf.rewriteTexts(values)
    # work on a rewritten temporary copy of the state, not the original
    newState=sf.writeTemp()
    self.say("Setting session manager with reader type",sf.readerType())
    sm=SM(requiredReader=sf.readerType())
    exporterType=None
    if doGeom:
        self.say("Getting geometry exporter",self.geomTypeTable[self.opts.geomType])
        exporters=sm.createModule("exporters")
        exporterType=getattr(exporters,self.geomTypeTable[self.opts.geomType])
    # make sure that the first snapshot is rendered correctly
    import paraview.simple as pvs
    pvs._DisableFirstRenderCameraReset()
    if not self.opts.progress:
        self.say("Toggling progress")
        sm.ToggleProgressPrinting()
    self.say("Loading state")
    sm.LoadState(newState)
    self.say("Getting Views")
    rViews=sm.GetRenderViews()
    views=pvs.GetViews()
    if len(views)>1:
        self.warning("More than 1 view in state-file. Generating multiple series")
        timeString="_View%(view)02d"+timeString
    timeString=self.opts.prefix+timeString+stateString
    self.say("Setting Offscreen rendering")
    offWarn=True
    for iView,view in enumerate(views):
        self.say("Processing view",iView,"of",len(views))
        if self.opts.offscreenRender:
            view.UseOffscreenRenderingForScreenshots=True
            if offWarn:
                self.warning("Trying offscreen rendering. If writing the file fails with a segmentation fault try --no-offscreen-rendering")
        elif offWarn:
            self.warning("No offscreen rendering. Camera perspective will probably be wrong")
        offWarn=False
    # bookkeeping for the --sources-list summary
    allSources=None
    alwaysSources=None
    self.say("Starting times",times)
    for i,t in enumerate(times):
        self.say("Nr",i,"time",t)
        print "Snapshot ",i," for t=",t,
        sys.stdout.flush()
        self.say()
        layouts=[]
        colorPrefix=""
        # from paraview.simple import UpdatePipeline
        # UpdatePipeline(time=float(t))
        if len(colorRanges)>0:
            # apply fixed color ranges before rendering
            for c in colorRanges:
                rng=colorRanges[c]
                self.say("Setting color",c,"to range",rng)
                self.setColorTransferFunction(c,rng)
        if PVVersion()>=(4,2) and len(filterColors)>0:
            # switch the field that selected sources are colored by
            self.say("Switch colors")
            from paraview.simple import GetSources,GetDisplayProperties,GetColorTransferFunction,GetScalarBar,HideUnusedScalarBars,UpdatePipeline,ColorBy,SetActiveView,GetRepresentations
            sources=GetSources()
            changedSources=set()
            for j,view in enumerate(views):
                for n in sources:
                    if n[0] in filterColors:
                        if view in rViews:
                            self.say("Found",n[0],"to be switched")
                            # This does not work as expected.
                            # dp=GetDisplayProperties(sources[n],view)
                            display=sm.GetRepresentation(sources[n],view)
                            if display==None:
                                self.say("No representation for",n[0],"in this view")
                                continue
                            if display.Visibility==0:
                                self.say("Not switching",n[0],"because it is not visible in this view")
                                # Invisible Sources don't need a color-change
                                # Currently Visibily does not work as I expect it (is always 1)
                                continue
                            if type(filterColors[n[0]])==tuple:
                                assoc,col=filterColors[n[0]]
                            else:
                                assoc,col=display.ColorArrayName[0],filterColors[n[0]]
                            if display.ColorArrayName==[assoc,col]:
                                self.say("Color already set to",assoc,col,".Skipping")
                                continue
                            ColorBy(display,[assoc,col])
                            # display.ColorArrayName=[assoc,col]
                            changedSources.add(n[0])
                            color=GetColorTransferFunction(col)
                            # display.ColorArrayName=filterColors[n[0]]
                            # display.LookupTable=color
                            # UpdatePipeline(proxy=sources[n])
                            if n[0] not in self.opts.noColorbar and (len(self.opts.colorbarView)==0 or j in self.opts.colorbarView):
                                self.say("Adding a colorbar")
                                scalar=GetScalarBar(color,view)
                                scalar.Visibility=1
                        elif sources[n].__class__.__name__=="Histogram" \
                             and view.__class__.__name__=="BarChartView":
                            # histograms in bar-chart views select their
                            # input array instead of a color array
                            self.say(n,"is a Histogram")
                            # dp=GetDisplayProperties(sources[n],view)
                            assoc,oldCol=sources[n].SelectInputArray
                            col=filterColors[n[0]]
                            self.say("Setting color from",oldCol,"to",col)
                            sources[n].SelectInputArray=[assoc,col]
                        else:
                            # self.say(n,"is unsuppored Source:",sources[n],
                            #          "View:",view)
                            pass
                HideUnusedScalarBars(view)
            if self.opts.colorPrefix:
                # encode the switched colors into the output file name
                for s in changedSources:
                    if type(filterColors[s])==tuple:
                        colorPrefix+=s+"="+filterColors[s][1]+"_"
                    else:
                        colorPrefix+=s+"="+filterColors[s]+"_"
        # rescale color transfer functions to the data range of
        # user-selected sources at the current time
        for c in self.opts.rescaleToSource:
            found=False
            from paraview.simple import GetSources
            sources=GetSources()
            for n in sources:
                if n[0]==c:
                    src=sources[n]
                    found=True
                    for view in views:
                        display=sm.GetRepresentation(sources[n],view)
                        if display==None:
                            continue
                        if display.Visibility==0:
                            continue
                        col=display.ColorArrayName[1]
                        src.UpdatePipeline(time=float(t))
                        if col in percentileRanges:
                            # clip the range to the requested percentiles
                            low,hig=percentileRanges[col]
                            if low is None:
                                low=0
                            if hig is None:
                                hig=100
                            else:
                                hig=100-hig
                            rng=self.getDataRangePercentile(src,col,low=low,high=hig)
                        else:
                            rng=self.getDataRange(src,col)
                        if not rng is None:
                            self.say("Resetting color function",col,"to range",rng,"because of data set",c)
                            if col in colorRanges:
                                # explicit extremes from --color-ranges win
                                low,hig=colorRanges[col]
                                if low is not None:
                                    rng=low,rng[1]
                                if hig is not None:
                                    rng=rng[0],hig
                                self.say("Extremes overruled to",rng,"for resetting")
                            self.setColorTransferFunction(col,rng)
                        else:
                            self.warning("No field",col,"found on",c)
                    break
            if not found:
                self.warning("Source",c,"not found")
        # render/write every view for this time-step
        for j,view in enumerate(views):
            if len(views)>0:
                print "View %d" % j,
                sys.stdout.flush()
                self.say()
            view.ViewTime=float(t)
            if doPic:
                print self.opts.picType,
                sys.stdout.flush()
                fn = (timeString % {'nr':i,'t':t,'view':j})+"."+self.opts.picType
                if PVVersion()<(3,6):
                    self.say("Very old Paraview writing")
                    view.WriteImage(fn,
                                    self.picTypeTable[self.opts.picType],
                                    self.opts.magnification)
                elif PVVersion()<(4,2):
                    self.say("Old Paraview writing")
                    from paraview.simple import SetActiveView,Render,WriteImage
                    self.say("Setting view")
                    SetActiveView(view)
                    self.say("Rendering")
                    Render()
                    self.say("Writing image",fn,"type",self.picTypeTable[self.opts.picType])
                    # This may produce a segmentation fault with offscreen rendering
                    WriteImage(fn,
                               view,
                               # Writer=self.picTypeTable[self.opts.picType],
                               Magnification=self.opts.magnification)
                    self.say("Finished writing")
                else:
                    doRender=True
                    usedView=view
                    self.say("New Paraview writing")
                    from paraview.simple import SetActiveView,SaveScreenshot,GetLayout,GetSources
                    layout=GetLayout(view)
                    if self.opts.doLayouts:
                        # write one image per layout, not per view
                        usedView=None
                        if layout in layouts:
                            doRender=False
                        else:
                            layouts.append(layout)
                    else:
                        layout=None
                    if doRender:
                        self.say("Writing image",colorPrefix+fn,"type",self.picTypeTable[self.opts.picType])
                        # This may produce a segmentation fault with offscreen rendering
                        SaveScreenshot(colorPrefix+fn,
                                       view=usedView,
                                       layout=layout,
                                       magnification=self.opts.magnification)
                    else:
                        self.say("Skipping image",colorPrefix+fn)
                    self.say("Finished writing")
            if doGeom:
                from paraview.simple import Show,Hide,GetSources
                print self.opts.geomType,
                sys.stdout.flush()
                # one geometry file per --source selection token
                for select in self.opts.sources:
                    fn = (timeString % {'nr':i,'t':t,'view':j})
                    if select!="":
                        print "*"+select+"*",
                        sys.stdout.flush()
                        fn+="_"+select
                        # only show the sources whose name matches the token
                        sources=GetSources()
                        for n,s in sources.iteritems():
                            if n[0].find(select)>=0:
                                Show(s,view)
                            else:
                                Hide(s,view)
                    fn += "."+self.opts.geomType
                    self.say("Creating exporter for file",fn)
                    ex=exporterType(FileName=fn)
                    ex.SetView(view)
                    ex.Write()
            if doSources:
                # record which sources exist at this time-step
                from paraview.simple import GetSources
                srcNames=[]
                sources=GetSources()
                for n in sources:
                    srcNames.append(n[0])
                if allSources==None:
                    allSources=set(srcNames)
                    alwaysSources=set(srcNames)
                else:
                    allSources|=set(srcNames)
                    alwaysSources&=set(srcNames)
        print
    if doSources:
        print
        print "List of found sources (* means that it is present in all timesteps)"
        for n in allSources:
            if n in alwaysSources:
                flag="*"
            else:
                flag=" "
            print " %s %s" % (flag,n)
    if createdDataFile:
        self.warning("Removing pseudo-data-file",dataFile)
        unlink(dataFile)
    del sm
def run(self): dirs=self.parser.getArgs() if len(dirs)==0: dirs=[path.curdir] cData=[] totalDiskusage=0 self.hasState=False for d in dirs: for n in listdir(d): cName=path.join(d,n) if path.isdir(cName): try: sol=SolutionDirectory(cName,archive=None,paraviewLink=False) if sol.isValid(): if self.opts.progress: print "Processing",cName data={} data["mtime"]=stat(cName)[ST_MTIME] times=sol.getTimes() try: data["first"]=times[0] except IndexError: data["first"]="None" try: data["last"]=times[-1] except IndexError: data["last"]="None" data["nrSteps"]=len(times) data["procs"]=sol.nrProcs() data["pFirst"]=-1 data["pLast"]=-1 data["nrParallel"]=-1 if self.opts.parallel: pTimes=sol.getParallelTimes() data["nrParallel"]=len(pTimes) if len(pTimes)>0: data["pFirst"]=pTimes[0] data["pLast"]=pTimes[-1] data["name"]=cName data["diskusage"]=-1 if self.opts.diskusage: data["diskusage"]=int(subprocess.Popen(["du","-sm",cName], stdout=subprocess.PIPE).communicate()[0].split()[0]) totalDiskusage+=data["diskusage"] if self.opts.parallel: for f in listdir(cName): if re.compile("processor[0-9]+").match(f): data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"]) if self.opts.state: try: data["nowTime"]=float(self.readState(sol,"CurrentTime")) except ValueError: data["nowTime"]=None try: data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen"))) except ValueError: data["lastOutput"]="nix" try: data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt"))) except ValueError: data["startedAt"]="nix" data["state"]=self.readState(sol,"TheState") cData.append(data) except OSError: print cName,"is unreadable" if self.opts.progress: print "Sorting data" if self.opts.reverse: cData.sort(lambda x,y:cmp(y[self.opts.sort],x[self.opts.sort])) else: cData.sort(lambda x,y:cmp(x[self.opts.sort],y[self.opts.sort])) if len(cData)==0: print "No cases found" return if self.opts.dump: print cData return lens={} for k in cData[0].keys(): lens[k]=len(k) for c 
in cData: for k in ["mtime","lastOutput","startedAt"]: try: if self.opts.relativeTime: c[k]=datetime.timedelta(seconds=long(time.time()-c[k])) else: c[k]=time.asctime(time.localtime(c[k])) except KeyError: pass except TypeError: c[k]=None for k,v in c.iteritems(): lens[k]=max(lens[k],len(str(v))) format="" spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "] if self.opts.parallel: spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "] if self.opts.diskusage: spec+=["diskusage"," MB "] if self.hasState: spec+=["nowTime"," s ","state"," | "] if self.opts.advancedState: spec+=["lastOutput"," | ","startedAt"," | "] spec+=["name"] for i,l in enumerate(spec): if not l in cData[0].keys(): format+=l else: if i<len(spec)-1: format+="%%(%s)%ds" % (l,lens[l]) else: format+="%%(%s)s" % (l) if self.opts.progress: print "Printing\n\n" header=format % dict(zip(cData[0].keys(),cData[0].keys())) print header print "-"*len(header) for d in cData: for k in d.keys(): d[k]=str(d[k]) print format % d if self.opts.diskusage: print "Total disk-usage:",totalDiskusage,"MB"