def test_with_broyden(self):
    """ broyden test """
    # Tutorial layout used below only matches OpenFOAM 2.2/2.3.
    # (idiom fix: `x not in ...` instead of `not x in ...`)
    if foamVersionNumber() not in [(2,3),(2,2)]:
        raise unittest.SkipTest("need ver.2.3 or 2.2 for this unittest.")
    cavityTut = os.path.join(foamTutorials(), "incompressible/icoFoam/cavity")
    if not os.path.exists(cavityTut):
        raise unittest.SkipTest("need $FOAM_TUTORIALS/incompressible/cavity \
for unittest.")
    try:
        shutil.copytree(cavityTut, os.path.join(self.tmpDir,"cavity"))
        cavityCase = SolutionDirectory(os.path.join(self.tmpDir,"cavity"))
    # was a bare `except:` -- that would also swallow SystemExit and
    # KeyboardInterrupt; any ordinary failure still skips the test
    except Exception:
        raise unittest.SkipTest("can not copy cavity case to temp_dir.")
    #create Allrun
    with open(os.path.join(cavityCase.name,"Allrun"),'w') as fp:
        fp.write('#!/bin/sh\nblockMesh>log.blockMesh\nicoFoam>log.icoFoam\n')
    # 0o777: Python-2.6+/3-compatible octal literal (was Py2-only `0777`)
    os.chmod(os.path.join(cavityCase.name,"Allrun"),0o777)
    #append controlDict
    fObj="""
functions
{
    probes
    {
        type probes;
        functionObjectLibs ("libsampling.so");
        enabled true;
        outputControl timeStep;
        outputInterval 1;
        fields
        (
            p
        );
        probeLocations
        (
            ( 0.1 0.0925 0.005 )
        );
    }
}
"""
    with open(cavityCase.controlDict(),'a') as fp:
        fp.write(fObj)
    #test start
    sim = set_as_top(BroydenCavityInstance())
    sim.cavity.case_dir = cavityCase.name
    sim.run()
    self.assertEqual(round(sim.cavity.nu,4),0.01)
def timeChanged(self):
    """Callback fired when the analyzed solver output reaches a new
    simulation time: refreshes cached step/time/progress information and
    triggers a redraw of the display.

    NOTE(review): reads and lazily fills self.startTime / self.endTime /
    self.caseName / self.execName; presumably called once per time step
    of the running solver -- confirm against the registering caller.
    """
    self.nSteps += 1
    self.currTime = self.analyzer.time
    self.progressString = self.analyzer.progressOut.lastProgress()
    # wall-clock total is only available if an "Execution" analyzer exists
    if self.analyzer.hasAnalyzer("Execution"):
        self.clockTime = self.analyzer.getAnalyzer(
            "Execution").clockTotal()
    # lazily determine the simulation start time (only once)
    if self.startTime is None:
        if self.runner:
            self.startTime = self.runner.createTime
        else:
            self.startTime = self.analyzer.getAnalyzer("Time").createTime()
    # lazily determine the configured endTime from the controlDict (only once)
    if self.endTime is None:
        sol = None
        if self.runner:
            sol = self.runner.getSolutionDirectory()
        else:
            if self.analyzer.hasAnalyzer("ExecName"):
                caseName = self.analyzer.getAnalyzer("ExecName").caseName
                if caseName and path.isdir(caseName):
                    # local import to avoid a hard module-level dependency
                    from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
                    sol = SolutionDirectory(caseName, paraviewLink=False)
        if sol:
            from PyFoam.RunDictionary.ParameterFile import ParameterFile
            control = ParameterFile(sol.controlDict())
            try:
                self.endTime = float(control.readParameter("endTime"))
            except ValueError:
                # endTime not parseable as a number -> sentinel
                self.endTime = -1
    # pick up case/solver name once it becomes known in the log
    if self.caseName is None or self.execName is None:
        if self.analyzer.hasAnalyzer("ExecName"):
            self.caseName = self.analyzer.getAnalyzer("ExecName").caseName
            self.execName = self.analyzer.getAnalyzer("ExecName").execName
            self.headerChanged = True
    # register all regular expressions known to the line analyzers
    from PyFoam.LogAnalysis.LogLineAnalyzer import LogLineAnalyzer
    for e in LogLineAnalyzer.allRegexp:
        addExpr(e)
    if self.firstTime:
        # first update must also size the display
        self.update(resize=True)
        self.firstTime = False
    else:
        self._checkHeaders(force=True)
def timeChanged(self):
    """React to a new simulation time in the solver output.

    Bumps the step counter, refreshes progress/clock information,
    lazily resolves start/end time and case/solver names, re-registers
    the analyzer regular expressions and redraws the display.
    """
    analyzer = self.analyzer
    self.nSteps += 1
    self.currTime = analyzer.time
    self.progressString = analyzer.progressOut.lastProgress()

    if analyzer.hasAnalyzer("Execution"):
        self.clockTime = analyzer.getAnalyzer("Execution").clockTotal()

    # Resolve the start time exactly once
    if self.startTime is None:
        self.startTime = (self.runner.createTime
                          if self.runner
                          else analyzer.getAnalyzer("Time").createTime())

    # Resolve the configured endTime exactly once
    if self.endTime is None:
        if self.runner:
            sol = self.runner.getSolutionDirectory()
        else:
            sol = None
            if analyzer.hasAnalyzer("ExecName"):
                caseName = analyzer.getAnalyzer("ExecName").caseName
                if caseName and path.isdir(caseName):
                    from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
                    sol = SolutionDirectory(caseName, paraviewLink=False)
        if sol:
            from PyFoam.RunDictionary.ParameterFile import ParameterFile
            control = ParameterFile(sol.controlDict())
            try:
                self.endTime = float(control.readParameter("endTime"))
            except ValueError:
                self.endTime = -1

    # Pick up case/solver names once the log reveals them
    if (self.caseName is None or self.execName is None) \
       and analyzer.hasAnalyzer("ExecName"):
        nameInfo = analyzer.getAnalyzer("ExecName")
        self.caseName = nameInfo.caseName
        self.execName = nameInfo.execName
        self.headerChanged = True

    from PyFoam.LogAnalysis.LogLineAnalyzer import LogLineAnalyzer
    for expr in LogLineAnalyzer.allRegexp:
        addExpr(expr)

    if self.firstTime:
        self.update(resize=True)
        self.firstTime = False
    else:
        self._checkHeaders(force=True)
class FoamBaseComponent(Component):
    """This class is base Component to execute OpenFOAM pre,main and post commands.

    `case_dir` (absolute, or relative to $FOAM_RUN) is resolved to a
    SolutionDirectory held in `self.foam_case`.
    """
    case_dir = Str("", iotype="in", desc='OpenFOAM Case Dir. Absolute path or relative path in $FOAM_RUN.')
    force_fd = Bool(True, iotype='in', framework_var=True, deriv_ignore=True,
                    desc="If True, always finite difference this component.")

    def __init__(self):
        """Component.__init__() and check the path to icoFoam."""
        super(FoamBaseComponent, self).__init__()
        self.foam_case = None
        #Check OpenFOAM commands in $PATH.
        if not self._which("icoFoam"):
            self.raise_exception("OpenFOAM command is not found. Check $PATH.",
                                 RuntimeError)
        if self.case_dir != '':
            caseDir = self.getPath(self.case_dir, False)
            if caseDir is not None:
                #type(caseDir) is str or unicode
                self.foam_case = SolutionDirectory(str(caseDir))

    def check_config(self):
        """Verify that a usable case directory has been configured."""
        # Raise for ANY unresolved case (the original only checked the
        # empty-case_dir combination and crashed with AttributeError when
        # case_dir was set but could not be resolved to a case).
        if not self.foam_case:
            self.raise_exception("Not set self.case_dir.", RuntimeError)
        if not os.path.exists(self.foam_case.controlDict()):
            self.raise_exception("%s is not found. Check self.case_dir."
                                 % (self.foam_case.controlDict()), RuntimeError)

    def _which(self, cmd):
        """which command with python. Return the executable's path or None."""
        def is_exe(val):
            return os.path.isfile(val) and os.access(val, os.X_OK)

        fpath, fname = os.path.split(cmd)
        if fpath:
            # cmd already contains a directory part -> check it directly
            if is_exe(cmd):
                return cmd
        else:
            # renamed loop variable (was `path`, shadowing os.path)
            for pathDir in os.environ["PATH"].split(os.pathsep):
                pathDir = pathDir.strip('"')
                exe_file = os.path.join(pathDir, cmd)
                if is_exe(exe_file):
                    return exe_file
        return None

    def getPath(self, fdpath, riseError=True):
        """check and get the absolute path or relative path in $FOAM_RUN.

        Returns the usable path, or None (or raises when riseError is True)
        if nothing was found.  (Parameter name `riseError` kept for
        backward compatibility.)
        """
        if os.path.exists(fdpath):
            return fdpath
        # fall back to interpreting fdpath relative to $FOAM_RUN
        fpath = None
        try:
            fpath = os.path.join(os.environ['FOAM_RUN'], fdpath)
        except KeyError:
            pass
        if fpath is not None and os.path.exists(fpath):
            return fpath
        if riseError:
            self.raise_exception(" '%s' was not found." % fdpath, RuntimeError)
        return None

    def _input_trait_modified(self, obj, name, old, new):
        """hook the changing of a trait."""
        super(FoamBaseComponent, self)._input_trait_modified(obj, name, old, new)
        if name == 'case_dir':
            caseDir = self.getPath(new, False)
            if caseDir is not None:
                #type(caseDir) is str or unicode
                self.foam_case = SolutionDirectory(str(caseDir))
            else:
                self.raise_exception("case_dir '%s' was"
                                     " not found." % new, RuntimeError)
def lookForCases(d):
    """Scan directory `d` for valid OpenFOAM cases and append one summary
    dict per case to the enclosing `cData` list; recurses into non-case
    directories when --recursive is active.

    NOTE(review): `totalDiskusage+=...` rebinds a name from the enclosing
    scope -- this only works if that name is declared global/nonlocal
    there; confirm against the full file.
    """
    for n in tqdm(listdir(d),
                  unit="entries",
                  leave=False,
                  desc=path.basename(path.abspath(d)),
                  disable=not self.opts.progressBar):
        # skip entries that don't match the user-supplied name pattern
        if not self.fnmatch(n):
            continue
        cName=path.join(d,n)
        if path.isdir(cName):
            try:
                sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                if sol.isValid():
                    if self.opts.progress:
                        print_("Processing",cName)
                    data={}
                    data["mtime"]=stat(cName)[ST_MTIME]
                    times=sol.getTimes()
                    # first/last written time step ("None" if no times yet)
                    try:
                        data["first"]=times[0]
                    except IndexError:
                        data["first"]="None"
                    try:
                        data["last"]=times[-1]
                    except IndexError:
                        data["last"]="None"
                    data["nrSteps"]=len(times)
                    data["procs"]=sol.nrProcs()
                    # -1 is the "not available" sentinel for parallel info
                    data["pFirst"]=-1
                    data["pLast"]=-1
                    data["nrParallel"]=-1
                    if self.opts.parallel:
                        pTimes=sol.getParallelTimes()
                        data["nrParallel"]=len(pTimes)
                        if len(pTimes)>0:
                            data["pFirst"]=pTimes[0]
                            data["pLast"]=pTimes[-1]
                    data["name"]=cName
                    data["diskusage"]=-1
                    if self.opts.diskusage:
                        data["diskusage"]=diskUsage(cName)
                        totalDiskusage+=data["diskusage"]
                    if self.opts.parallel:
                        # processor dirs may be newer than the case dir itself
                        for f in listdir(cName):
                            if re.compile("processor[0-9]+").match(f):
                                data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                    if self.opts.state or self.opts.estimateEndTime:
                        try:
                            data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                        except ValueError:
                            data["startedAt"]="nix"
                    if self.opts.state:
                        try:
                            data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                        except ValueError:
                            data["nowTime"]=None
                        try:
                            data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                        except ValueError:
                            data["lastOutput"]="nix"
                        data["state"]=self.readState(sol,"TheState")
                        if data["state"]=="Running":
                            # flag runs without recent output as dead
                            try:
                                gone=time.time()-data["lastOutput"]
                                if gone>self.opts.deadThreshold:
                                    data["state"]="Dead "+humanReadableDuration(gone)
                            except KeyError:
                                pass
                            except TypeError:
                                pass
                    if self.opts.startEndTime or self.opts.estimateEndTime:
                        try:
                            ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                        except PyFoamParserError:
                            # Didn't work with Macro expansion. Let's try without
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            except PyFoamParserError:
                                ctrlDict=None
                        if ctrlDict:
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]
                        else:
                            data["startTime"]=None
                            data["endTime"]=None
                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"]=None
                            if self.readState(sol,"TheState")=="Running":
                                # linear extrapolation: wall time / simulated fraction
                                gone=time.time()-data["startedAt"]
                                try:
                                    current=float(self.readState(sol,"CurrentTime"))
                                    frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                except ValueError:
                                    frac=0
                                if frac>0:
                                    data["endTimeEstimate"]=data["startedAt"]+gone/frac
                    if self.opts.hgInfo:
                        if path.isdir(path.join(cName,".hg")):
                            from stat import ST_ATIME
                            prevStat=stat(cName)
                            try:
                                data["hgInfo"]=sub.Popen(["hg", "id", "-R",cName, "-b","-n","-i"], stdout=sub.PIPE).communicate()[0].strip()
                            except OSError:
                                data["hgInfo"]="<hg not working>"
                            postStat=stat(cName)
                            if prevStat[ST_MTIME]!=postStat[ST_MTIME]:
                                # hg seems to modify the modification time of the directory. So reset it
                                os.utime(cName,(postStat[ST_ATIME],prevStat[ST_MTIME]))
                        else:
                            data["hgInfo"]="<no .hg directory>"
                    if len(customData)>0 or self.opts.hostname:
                        fn=None
                        pickleFile=None
                        if useSolverInData:
                            data["solver"]="none found"
                            # try to find the oldest pickled file
                            dirAndTime=[]
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                for g in glob(path.join(cName,"*.analyzed")):
                                    pName=path.join(g,f)
                                    base=path.basename(g)
                                    # solver name encoded as PyFoamRunner.<solver>.analyzed
                                    if base.find("PyFoamRunner.")==0:
                                        solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                    else:
                                        solverName=None
                                    if path.exists(pName):
                                        dirAndTime.append((path.getmtime(pName),solverName,pName))
                            # newest pickle file wins
                            dirAndTime.sort(key=lambda x:x[0])
                            if len(dirAndTime)>0:
                                data["solver"]=dirAndTime[-1][1]
                                pickleFile=dirAndTime[-1][2]
                            solverName=data["solver"]
                        else:
                            solverName=self.opts.solverNameForCustom
                        if pickleFile:
                            fn=pickleFile
                        else:
                            for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                if path.exists(fp):
                                    fn=fp
                                    break
                        pickleOK=False
                        if fn:
                            try:
                                raw=pickle.Unpickler(open(fn,"rb")).load()
                                pickleOK=True
                                # walk into the pickled data along each '::'-spec
                                for n,spec in customData:
                                    dt=raw
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"]=raw["hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"]="<unspecified>"
                            except ValueError:
                                pass
                        if not pickleOK:
                            for n,spec in customData:
                                data[n]="<no file>"
                            if self.opts.hostname:
                                data["hostname"]="<no file>"
                    cData.append(data)
                elif self.opts.recursive:
                    # print("Recurse",cName)
                    lookForCases(cName)
            except OSError:
                print_(cName,"is unreadable")
def lookForCases(d):
    """Collect one summary dict per valid OpenFOAM case found in `d`,
    appending to the enclosing `cData`; descends into non-case
    directories when --recursive is set.

    NOTE(review): the `totalDiskusage +=` below rebinds a name from the
    enclosing scope; that requires a global/nonlocal declaration there --
    confirm against the full file.
    """
    for n in tqdm(listdir(d),
                  unit="entries",
                  leave=False,
                  desc=path.basename(path.abspath(d)),
                  disable=not self.opts.progressBar):
        # honour the user-supplied name filter
        if not self.fnmatch(n):
            continue
        cName = path.join(d, n)
        if path.isdir(cName):
            try:
                sol = SolutionDirectory(cName, archive=None, paraviewLink=False)
                if sol.isValid():
                    if self.opts.progress:
                        print_("Processing", cName)
                    data = {}
                    data["mtime"] = stat(cName)[ST_MTIME]
                    times = sol.getTimes()
                    # first/last written time ("None" when no times exist)
                    try:
                        data["first"] = times[0]
                    except IndexError:
                        data["first"] = "None"
                    try:
                        data["last"] = times[-1]
                    except IndexError:
                        data["last"] = "None"
                    data["nrSteps"] = len(times)
                    data["procs"] = sol.nrProcs()
                    # -1 means "no parallel information available"
                    data["pFirst"] = -1
                    data["pLast"] = -1
                    data["nrParallel"] = -1
                    if self.opts.parallel:
                        pTimes = sol.getParallelTimes()
                        data["nrParallel"] = len(pTimes)
                        if len(pTimes) > 0:
                            data["pFirst"] = pTimes[0]
                            data["pLast"] = pTimes[-1]
                    data["name"] = cName
                    data["diskusage"] = -1
                    if self.opts.diskusage:
                        data["diskusage"] = diskUsage(cName)
                        totalDiskusage += data["diskusage"]
                    if self.opts.parallel:
                        # a processor dir may have been written after the case dir
                        for f in listdir(cName):
                            if re.compile("processor[0-9]+").match(f):
                                data["mtime"] = max(
                                    stat(path.join(cName, f))[ST_MTIME],
                                    data["mtime"])
                    if self.opts.state or self.opts.estimateEndTime:
                        try:
                            data["startedAt"] = time.mktime(
                                time.strptime(
                                    self.readState(sol, "StartedAt")))
                        except ValueError:
                            data["startedAt"] = "nix"
                    if self.opts.state:
                        try:
                            data["nowTime"] = float(
                                self.readState(sol, "CurrentTime"))
                        except ValueError:
                            data["nowTime"] = None
                        try:
                            data["lastOutput"] = time.mktime(
                                time.strptime(
                                    self.readState(
                                        sol, "LastOutputSeen")))
                        except ValueError:
                            data["lastOutput"] = "nix"
                        data["state"] = self.readState(sol, "TheState")
                        if data["state"] == "Running":
                            # no output for longer than deadThreshold -> dead
                            try:
                                gone = time.time() - data["lastOutput"]
                                if gone > self.opts.deadThreshold:
                                    data[
                                        "state"] = "Dead " + humanReadableDuration(
                                            gone)
                            except KeyError:
                                pass
                            except TypeError:
                                pass
                    if self.opts.startEndTime or self.opts.estimateEndTime:
                        try:
                            ctrlDict = ParsedParameterFile(
                                sol.controlDict(), doMacroExpansion=True)
                        except PyFoamParserError:
                            # Didn't work with Macro expansion. Let's try without
                            try:
                                ctrlDict = ParsedParameterFile(
                                    sol.controlDict())
                            except PyFoamParserError:
                                ctrlDict = None
                        if ctrlDict:
                            data["startTime"] = ctrlDict["startTime"]
                            data["endTime"] = ctrlDict["endTime"]
                        else:
                            data["startTime"] = None
                            data["endTime"] = None
                        if self.opts.estimateEndTime:
                            data["endTimeEstimate"] = None
                            if self.readState(sol, "TheState") == "Running":
                                # wall time scaled by the simulated fraction
                                gone = time.time() - data["startedAt"]
                                try:
                                    current = float(
                                        self.readState(sol, "CurrentTime"))
                                    frac = (current - data["startTime"]
                                            ) / (data["endTime"] -
                                                 data["startTime"])
                                except ValueError:
                                    frac = 0
                                if frac > 0:
                                    data["endTimeEstimate"] = data[
                                        "startedAt"] + gone / frac
                    if self.opts.hgInfo:
                        if path.isdir(path.join(cName, ".hg")):
                            from stat import ST_ATIME
                            prevStat = stat(cName)
                            try:
                                data["hgInfo"] = sub.Popen(
                                    [
                                        "hg", "id", "-R", cName, "-b", "-n",
                                        "-i"
                                    ],
                                    stdout=sub.PIPE).communicate(
                                    )[0].strip()
                            except OSError:
                                data["hgInfo"] = "<hg not working>"
                            postStat = stat(cName)
                            if prevStat[ST_MTIME] != postStat[ST_MTIME]:
                                # hg seems to modify the modification time of the directory. So reset it
                                os.utime(cName, (postStat[ST_ATIME],
                                                 prevStat[ST_MTIME]))
                        else:
                            data["hgInfo"] = "<no .hg directory>"
                    if len(customData) > 0 or self.opts.hostname:
                        fn = None
                        pickleFile = None
                        if useSolverInData:
                            data["solver"] = "none found"
                            # try to find the oldest pickled file
                            dirAndTime = []
                            for f in [
                                    "pickledData", "pickledUnfinishedData",
                                    "pickledStartData"
                            ]:
                                for g in glob(
                                        path.join(cName, "*.analyzed")):
                                    pName = path.join(g, f)
                                    base = path.basename(g)
                                    # solver encoded as PyFoamRunner.<solver>.analyzed
                                    if base.find("PyFoamRunner.") == 0:
                                        solverName = base[
                                            len("PyFoamRunner."
                                                ):-len(".analyzed")]
                                    else:
                                        solverName = None
                                    if path.exists(pName):
                                        dirAndTime.append(
                                            (path.getmtime(pName),
                                             solverName, pName))
                            # take the most recently modified pickle file
                            dirAndTime.sort(key=lambda x: x[0])
                            if len(dirAndTime) > 0:
                                data["solver"] = dirAndTime[-1][1]
                                pickleFile = dirAndTime[-1][2]
                            solverName = data["solver"]
                        else:
                            solverName = self.opts.solverNameForCustom
                        if pickleFile:
                            fn = pickleFile
                        else:
                            for f in [
                                    "pickledData", "pickledUnfinishedData",
                                    "pickledStartData"
                            ]:
                                fp = path.join(
                                    cName,
                                    "PyFoamRunner." + solverName +
                                    ".analyzed", f)
                                if path.exists(fp):
                                    fn = fp
                                    break
                        pickleOK = False
                        if fn:
                            try:
                                raw = pickle.Unpickler(open(
                                    fn, "rb")).load()
                                pickleOK = True
                                # descend into the data along each '::'-spec
                                for n, spec in customData:
                                    dt = raw
                                    for k in spec:
                                        try:
                                            dt = dt[k]
                                        except KeyError:
                                            dt = "No key '" + k + "'"
                                            break
                                        if isinstance(
                                                dt, string_types):
                                            break
                                    data[n] = dt
                                if self.opts.hostname:
                                    try:
                                        data["hostname"] = raw[
                                            "hostname"].split(".")[0]
                                    except KeyError:
                                        data[
                                            "hostname"] = "<unspecified>"
                            except ValueError:
                                pass
                        if not pickleOK:
                            # no readable pickle file -> placeholder values
                            for n, spec in customData:
                                data[n] = "<no file>"
                            if self.opts.hostname:
                                data["hostname"] = "<no file>"
                    cData.append(data)
                elif self.opts.recursive:
                    # print("Recurse",cName)
                    lookForCases(cName)
            except OSError:
                print_(cName, "is unreadable")
def run(self):
    """List all OpenFOAM cases below the given directories and print a
    formatted table (or dump the raw data with --dump).

    Collects per-case metadata (times, parallel info, disk usage, state,
    end-time estimates, custom pickled data) and renders one row per case.
    """
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]
    cData=[]
    totalDiskusage=0
    useSolverInData=False
    self.hasState=False

    # parse --custom-data specifications of the form 'name=subs1::subs2'
    customData=[]
    for i,c in enumerate(self.opts.customData):
        lst=c.split("=")
        if len(lst)==2:
            name,spec=lst
            name+="_"  # Make sure that there is no collision with standard-names
        elif len(lst)==1:
            name,spec="Custom%d" % (i+1),c
        else:
            self.error("Custom specification",c,"does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name,spec.split("::")))

    if len(customData)>0 and not self.opts.solverNameForCustom:
        self.warning("Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used")
        useSolverInData=True

    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)
                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        # first/last written time step
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            data["diskusage"]=diskUsage(cName)
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            # processor dirs may be newer than the case dir
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                        if self.opts.state:
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                try:
                                    ctrlDict=ParsedParameterFile(sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict=None
                            if ctrlDict:
                                data["startTime"]=ctrlDict["startTime"]
                                data["endTime"]=ctrlDict["endTime"]
                            else:
                                data["startTime"]=None
                                data["endTime"]=None
                            if self.opts.estimateEndTime:
                                data["endTimeEstimate"]=None
                                if self.readState(sol,"TheState")=="Running":
                                    # extrapolate wall time over simulated fraction
                                    gone=time.time()-data["startedAt"]
                                    try:
                                        current=float(self.readState(sol,"CurrentTime"))
                                        frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                    except ValueError:
                                        frac=0
                                    if frac>0:
                                        data["endTimeEstimate"]=data["startedAt"]+gone/frac
                        if len(customData)>0:
                            fn=None
                            pickleFile=None
                            if useSolverInData:
                                data["solver"]="none found"
                                # try to find the oldest pickled file
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    dirAndTime=[]
                                    for g in glob(path.join(cName,"*.analyzed")):
                                        pName=path.join(g,f)
                                        base=path.basename(g)
                                        if base.find("PyFoamRunner.")==0:
                                            solverName=base[len("PyFoamRunner."):-len(".analyzed")]
                                        else:
                                            solverName=None
                                        if path.exists(pName):
                                            dirAndTime.append((path.getmtime(pName),solverName,pName))
                                    dirAndTime.sort(key=lambda x:x[0])
                                    if len(dirAndTime)>0:
                                        data["solver"]=dirAndTime[-1][1]
                                        pickleFile=dirAndTime[-1][2]
                                        break
                                solverName=data["solver"]
                            else:
                                solverName=self.opts.solverNameForCustom
                            if pickleFile:
                                fn=pickleFile
                            else:
                                for f in ["pickledData","pickledUnfinishedData","pickledStartData"]:
                                    fp=path.join(cName,"PyFoamRunner."+solverName+".analyzed",f)
                                    if path.exists(fp):
                                        fn=fp
                                        break
                            if fn:
                                # open in binary mode (pickles are binary; the
                                # sibling implementation uses "rb" too) and
                                # close the file deterministically
                                with open(fn,"rb") as pickledFile:
                                    raw=pickle.Unpickler(pickledFile).load()
                                for n,spec in customData:
                                    dt=raw
                                    for k in spec:
                                        try:
                                            dt=dt[k]
                                        except KeyError:
                                            dt="No key '"+k+"'"
                                            break
                                        if isinstance(dt,string_types):
                                            break
                                    data[n]=dt
                            else:
                                for n,spec in customData:
                                    data[n]="no file"
                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")

    if self.opts.progress:
        print_("Sorting data")
    cData.sort(key=lambda x:x[self.opts.sort],reverse=self.opts.reverse)
    if len(cData)==0:
        print_("No cases found")
        return
    if self.opts.dump:
        print_(cData)
        return

    # compute column widths
    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k] is not None:
                    if self.opts.relativeTime:
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None
        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))

    # build the %-format string (renamed from `format`, which shadowed the builtin)
    fmt=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    if useSolverInData:
        spec+=["solver"," | "]
    for n,s in customData:
        spec+=[n," | "]
    spec+=["name"]
    for i,l in enumerate(spec):
        if l not in list(cData[0].keys()):
            fmt+=l
        else:
            if i<len(spec)-1:
                fmt+="%%(%s)%ds" % (l,lens[l])
            else:
                fmt+="%%(%s)s" % (l)

    if self.opts.progress:
        print_("Printing\n\n")
    header=fmt % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))
    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(fmt % d)
    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    """List OpenFOAM cases below the given directories and print a
    formatted per-case summary table (or dump raw data with --dump)."""
    dirs=self.parser.getArgs()
    if len(dirs)==0:
        dirs=[path.curdir]
    cData=[]
    totalDiskusage=0
    self.hasState=False

    for d in dirs:
        for n in listdir(d):
            cName=path.join(d,n)
            if path.isdir(cName):
                try:
                    sol=SolutionDirectory(cName,archive=None,paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing",cName)
                        data={}
                        data["mtime"]=stat(cName)[ST_MTIME]
                        times=sol.getTimes()
                        # first/last written time step
                        try:
                            data["first"]=times[0]
                        except IndexError:
                            data["first"]="None"
                        try:
                            data["last"]=times[-1]
                        except IndexError:
                            data["last"]="None"
                        data["nrSteps"]=len(times)
                        data["procs"]=sol.nrProcs()
                        data["pFirst"]=-1
                        data["pLast"]=-1
                        data["nrParallel"]=-1
                        if self.opts.parallel:
                            pTimes=sol.getParallelTimes()
                            data["nrParallel"]=len(pTimes)
                            if len(pTimes)>0:
                                data["pFirst"]=pTimes[0]
                                data["pLast"]=pTimes[-1]
                        data["name"]=cName
                        data["diskusage"]=-1
                        if self.opts.diskusage:
                            try:
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sb",cName],
                                        stdout=subprocess.PIPE,
                                        stderr=open(os.devnull,"w")
                                    ).communicate()[0].split()[0])
                            except IndexError:
                                # assume that this du does not support -b
                                data["diskusage"]=int(
                                    subprocess.Popen(
                                        ["du","-sk",cName],
                                        stdout=subprocess.PIPE
                                    ).communicate()[0].split()[0])*1024
                            totalDiskusage+=data["diskusage"]
                        if self.opts.parallel:
                            # processor dirs may be newer than the case dir
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"]=max(stat(path.join(cName,f))[ST_MTIME],data["mtime"])
                        if self.opts.state:
                            try:
                                data["nowTime"]=float(self.readState(sol,"CurrentTime"))
                            except ValueError:
                                data["nowTime"]=None
                            try:
                                data["lastOutput"]=time.mktime(time.strptime(self.readState(sol,"LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"]="nix"
                            data["state"]=self.readState(sol,"TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"]=time.mktime(time.strptime(self.readState(sol,"StartedAt")))
                            except ValueError:
                                data["startedAt"]="nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict=ParsedParameterFile(sol.controlDict(),doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                ctrlDict=ParsedParameterFile(sol.controlDict())
                            data["startTime"]=ctrlDict["startTime"]
                            data["endTime"]=ctrlDict["endTime"]
                            if self.opts.estimateEndTime:
                                data["endTimeEstimate"]=None
                                if self.readState(sol,"TheState")=="Running":
                                    # extrapolate wall time over simulated fraction
                                    gone=time.time()-data["startedAt"]
                                    try:
                                        current=float(self.readState(sol,"CurrentTime"))
                                        frac=(current-data["startTime"])/(data["endTime"]-data["startTime"])
                                    except ValueError:
                                        frac=0
                                    if frac>0:
                                        data["endTimeEstimate"]=data["startedAt"]+gone/frac
                        cData.append(data)
                except OSError:
                    print_(cName,"is unreadable")

    if self.opts.progress:
        print_("Sorting data")
    # key-based sort instead of the Python-2-only cmp-lambda form
    # (matches the newer sibling implementation of this method)
    cData.sort(key=lambda x:x[self.opts.sort],reverse=self.opts.reverse)
    if len(cData)==0:
        print_("No cases found")
        return
    if self.opts.dump:
        print_(cData)
        return

    # compute column widths
    lens={}
    for k in list(cData[0].keys()):
        lens[k]=len(k)
    for c in cData:
        for k in ["mtime","lastOutput","startedAt","endTimeEstimate"]:
            try:
                if c[k] is not None:
                    if self.opts.relativeTime:
                        c[k]=datetime.timedelta(seconds=long(time.time()-c[k]))
                    else:
                        c[k]=time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                c[k]=None
        try:
            c["diskusage"]=humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        for k,v in iteritems(c):
            lens[k]=max(lens[k],len(str(v)))

    # build the %-format string (renamed from `format`, which shadowed the builtin)
    fmt=""
    spec=["mtime"," | ","first"," - ","last"," (","nrSteps",") "]
    if self.opts.parallel:
        spec+=["| ","procs"," : ","pFirst"," - ","pLast"," (","nrParallel",") | "]
    if self.opts.diskusage:
        spec+=["diskusage"," | "]
    if self.hasState:
        spec+=["nowTime"," s ","state"," | "]
        if self.opts.advancedState:
            spec+=["lastOutput"," | ","startedAt"," | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec+=["startedAt"," | "]
        spec+=["endTimeEstimate"," | "]
    if self.opts.startEndTime:
        spec+=["startTime"," | ","endTime"," | "]
    spec+=["name"]
    for i,l in enumerate(spec):
        if l not in list(cData[0].keys()):
            fmt+=l
        else:
            if i<len(spec)-1:
                fmt+="%%(%s)%ds" % (l,lens[l])
            else:
                fmt+="%%(%s)s" % (l)

    if self.opts.progress:
        print_("Printing\n\n")
    header=fmt % dict(list(zip(list(cData[0].keys()),list(cData[0].keys()))))
    print_(header)
    print_("-"*len(header))
    for d in cData:
        for k in list(d.keys()):
            d[k]=str(d[k])
        print_(fmt % d)
    if self.opts.diskusage:
        print_("Total disk-usage:",humanReadableSize(totalDiskusage))
def run(self):
    """Run a solver on a case and automatically restart it (relying on
    `startFrom latestTime` in the controlDict) until the endTime is
    reached, no progress is made, or the maximum restart count is hit.
    """
    casePath = self.parser.casePath()
    self.checkCase(casePath)
    # self.addLocalConfig(casePath)
    self.addToCaseLog(casePath, "Starting")
    self.prepareHooks()
    self.processPlotLineOptions(autoPath=casePath)
    lam = self.getParallel(SolutionDirectory(casePath, archive=None))
    isParallel = lam is not None
    # last time-directory written by the previous restart (for stall detection)
    self.lastWrittenTime = None
    sol = SolutionDirectory(casePath, archive=None, parallel=isParallel)
    ctrlDict = ParameterFile(sol.controlDict(), backup=False)
    # restarting only works if the solver picks up from the latest time
    if ctrlDict.readParameter("startFrom") != "latestTime":
        self.error(
            "In", casePath,
            "the value of 'startFrom' is not 'latestTime' (required for this script)"
        )
    args = self.replaceAutoInArgs(self.parser.getArgs())

    def checkRestart(data=None):
        """Return a human-readable reason to STOP restarting, or None to
        continue. `data` is the runner's result dict from the last run."""
        lastTimeName = sol.getLast()
        lastTime = float(lastTimeName)
        ctrlDict = ParameterFile(sol.controlDict(), backup=False)
        endTime = float(ctrlDict.readParameter("endTime"))
        # relative comparison: close enough to endTime counts as finished
        if abs(endTime - lastTime) / endTime < 1e-5:
            return "Reached endTime {}".format(endTime)
        logfile = calcLogname(self.opts.logname, args)
        isRestart, restartnr, restartName, lastlog = findRestartFiles(
            logfile, sol)
        # TODO: look into the logfile
        if self.lastWrittenTime is not None:
            # no new time directory since the previous restart -> stalled
            if self.lastWrittenTime == lastTimeName:
                return "Last restart didn't improve on {}. Further restarts make no sense".format(
                    lastTime)
        self.lastWrittenTime = lastTimeName
        if data:
            if "stepNr" in data and data["stepNr"] < self.opts.minimumSteps:
                return "Only {} steps done while {} are required".format(
                    data["stepNr"], self.opts.minimumSteps)

    redo = True
    reason = checkRestart()
    if reason is not None:
        self.warning("Not starting:", reason)
        redo = False
    self.checkAndCommit(sol)
    self.initBlink()
    startNr = 0
    self.setLogname()
    while redo:
        startNr += 1
        print_()
        print_("Starting restart nr", startNr, "on case", casePath)
        print_()
        self.addToCaseLog(casePath, "Restart nr", startNr, "started")
        run = AnalyzedRunner(BoundingLogAnalyzer(
            progress=self.opts.progress,
            doFiles=self.opts.writeFiles,
            singleFile=self.opts.singleDataFilesOnly,
            doTimelines=True),
                             silent=self.opts.progress or self.opts.silent,
                             splitThres=self.opts.splitDataPointsThreshold
                             if self.opts.doSplitDataPoints else None,
                             argv=args,
                             server=self.opts.server,
                             lam=lam,
                             logname=self.opts.logname,
                             compressLog=self.opts.compress,
                             logTail=self.opts.logTail,
                             noLog=self.opts.noLog,
                             remark=self.opts.remark,
                             parameters=self.getRunParameters(),
                             echoCommandLine=self.opts.echoCommandPrefix,
                             jobId=self.opts.jobId)
        run.createPlots(customRegexp=self.lines_,
                        splitThres=self.opts.splitDataPointsThreshold
                        if self.opts.doSplitDataPoints else None,
                        writeFiles=self.opts.writeFiles)
        # wire the curses display (if any) into the new runner
        if self.cursesWindow:
            self.cursesWindow.setAnalyzer(run.analyzer)
            self.cursesWindow.setRunner(run)
            run.analyzer.addTimeListener(self.cursesWindow)
        self.addWriteAllTrigger(run, SolutionDirectory(casePath,
                                                       archive=None))
        self.addLibFunctionTrigger(
            run, SolutionDirectory(casePath, archive=None))
        self.runPreHooks()
        if self.blink1:
            run.addTicker(lambda: self.blink1.ticToc())
        # blocks until the solver run finishes
        run.start()
        if run.data["keyboardInterrupt"]:
            print_()
            self.warning("Not restarting because of keyboard interrupt")
            redo = False
        self.setData({startNr: run.data})
        self.runPostHooks()
        self.reportUsage(run)
        self.reportRunnerData(run)
        self.addToCaseLog(casePath, "Restart nr", startNr, "ended")
        reason = checkRestart(data=run.data)
        if reason is not None:
            print_()
            self.warning("Not starting:", reason)
            self.addToCaseLog(casePath, "Stopping because of", reason)
            redo = False
        if startNr >= self.opts.maximumRestarts:
            print_()
            self.warning("Maximum number", self.opts.maximumRestarts,
                         "restarts reached")
            self.addToCaseLog(casePath, "Stopping because maximum number",
                              self.opts.maximumRestarts,
                              "of restarts reached")
            redo = False
    self.stopBlink()
    self.addToCaseLog(casePath, "Ended")
    print_()
    print_("Ended after", startNr, "restarts")
    print_()
def run(self):
    """List all OpenFOAM cases found in the given directories.

    Walks every directory given on the command line (default: the
    current directory), collects per-case information into one dict per
    case (time range, number of processors, disk usage, run state,
    end-time estimate, Mercurial info, custom pickled data - each
    governed by a command-line option) and prints a formatted table,
    sorted by the column selected with the sort option.
    """
    dirs = self.parser.getArgs()
    if len(dirs) == 0:
        # No directories given on the command line: scan the current one
        dirs = [path.curdir]
    cData = []            # one dict per valid case; becomes the table rows
    totalDiskusage = 0    # running sum over all cases (only if diskusage option set)
    useSolverInData = False
    self.hasState = False  # presumably set to True by self.readState() when state files exist -- TODO confirm
    # Parse the custom-data specifications. Each entry is either
    # 'name=subs1::subs2::...' or a bare '::'-separated key path that
    # gets an auto-generated name 'Custom<i>'.
    customData = []
    for i, c in enumerate(self.opts.customData):
        lst = c.split("=")
        if len(lst) == 2:
            name, spec = lst
            name += "_"  # Make sure that there is no collision with standard-names
        elif len(lst) == 1:
            name, spec = "Custom%d" % (i + 1), c
        else:
            # self.error() presumably aborts, so name/spec being unset here is harmless
            self.error("Custom specification", c,
                       "does not fit the pattern 'name=subs1::subs2::..'")
        customData.append((name, spec.split("::")))
    if len(customData) > 0 and not self.opts.solverNameForCustom:
        # Without an explicit solver name the solver has to be guessed
        # from the pickled data found in each case (useSolverInData)
        self.warning(
            "Parameter '--solver-name-for-custom-data' should be set if '--custom-data' is used"
        )
        useSolverInData = True
    elif self.opts.hostname:
        useSolverInData = True
    # Progress bars are suppressed unless explicitly requested (and, for
    # the outer bar, unless there is more than one directory)
    for d in tqdm(dirs,
                  unit="dirs",
                  disable=not self.opts.progressBar or len(dirs) < 2):
        if not path.isdir(d):
            self.warning("There is no directory", d, "here")
            continue
        for n in tqdm(listdir(d),
                      unit="entries",
                      desc=path.basename(path.abspath(d)),
                      disable=not self.opts.progressBar):
            if not self.fnmatch(n):
                # Entry does not match the name pattern filter
                continue
            cName = path.join(d, n)
            if path.isdir(cName):
                try:
                    sol = SolutionDirectory(cName,
                                            archive=None,
                                            paraviewLink=False)
                    if sol.isValid():
                        if self.opts.progress:
                            print_("Processing", cName)
                        data = {}
                        data["mtime"] = stat(cName)[ST_MTIME]
                        times = sol.getTimes()
                        # First/last written time; "None" (string) if the
                        # case has no time directories at all
                        try:
                            data["first"] = times[0]
                        except IndexError:
                            data["first"] = "None"
                        try:
                            data["last"] = times[-1]
                        except IndexError:
                            data["last"] = "None"
                        data["nrSteps"] = len(times)
                        data["procs"] = sol.nrProcs()
                        # Defaults for the parallel columns (-1 = not checked)
                        data["pFirst"] = -1
                        data["pLast"] = -1
                        data["nrParallel"] = -1
                        if self.opts.parallel:
                            pTimes = sol.getParallelTimes()
                            data["nrParallel"] = len(pTimes)
                            if len(pTimes) > 0:
                                data["pFirst"] = pTimes[0]
                                data["pLast"] = pTimes[-1]
                        data["name"] = cName
                        data["diskusage"] = -1
                        if self.opts.diskusage:
                            data["diskusage"] = diskUsage(cName)
                            totalDiskusage += data["diskusage"]
                        if self.opts.parallel:
                            # Take processor directories into account for the
                            # modification time as well
                            for f in listdir(cName):
                                if re.compile("processor[0-9]+").match(f):
                                    data["mtime"] = max(
                                        stat(path.join(cName, f))[ST_MTIME],
                                        data["mtime"])
                        if self.opts.state:
                            # State information written by PyFoam runners
                            try:
                                data["nowTime"] = float(
                                    self.readState(sol, "CurrentTime"))
                            except ValueError:
                                data["nowTime"] = None
                            try:
                                data["lastOutput"] = time.mktime(
                                    time.strptime(
                                        self.readState(
                                            sol, "LastOutputSeen")))
                            except ValueError:
                                data["lastOutput"] = "nix"
                            data["state"] = self.readState(sol, "TheState")
                        if self.opts.state or self.opts.estimateEndTime:
                            try:
                                data["startedAt"] = time.mktime(
                                    time.strptime(
                                        self.readState(sol, "StartedAt")))
                            except ValueError:
                                data["startedAt"] = "nix"
                        if self.opts.startEndTime or self.opts.estimateEndTime:
                            try:
                                ctrlDict = ParsedParameterFile(
                                    sol.controlDict(), doMacroExpansion=True)
                            except PyFoamParserError:
                                # Didn't work with Macro expansion. Let's try without
                                try:
                                    ctrlDict = ParsedParameterFile(
                                        sol.controlDict())
                                except PyFoamParserError:
                                    ctrlDict = None
                            if ctrlDict:
                                data["startTime"] = ctrlDict["startTime"]
                                data["endTime"] = ctrlDict["endTime"]
                            else:
                                data["startTime"] = None
                                data["endTime"] = None
                        if self.opts.estimateEndTime:
                            # Linear extrapolation of the wall-clock end time
                            # from the fraction of simulated time already done
                            data["endTimeEstimate"] = None
                            if self.readState(sol, "TheState") == "Running":
                                gone = time.time() - data["startedAt"]
                                try:
                                    current = float(
                                        self.readState(sol, "CurrentTime"))
                                    frac = (current - data["startTime"]
                                            ) / (data["endTime"] -
                                                 data["startTime"])
                                except ValueError:
                                    frac = 0
                                if frac > 0:
                                    data["endTimeEstimate"] = data[
                                        "startedAt"] + gone / frac
                        if self.opts.hgInfo:
                            if path.isdir(path.join(cName, ".hg")):
                                from stat import ST_ATIME
                                prevStat = stat(cName)
                                try:
                                    data["hgInfo"] = sub.Popen(
                                        [
                                            "hg", "id", "-R", cName, "-b",
                                            "-n", "-i"
                                        ],
                                        stdout=sub.PIPE).communicate(
                                        )[0].strip()
                                except OSError:
                                    data["hgInfo"] = "<hg not working>"
                                postStat = stat(cName)
                                if prevStat[ST_MTIME] != postStat[ST_MTIME]:
                                    # hg seems to modify the modification time of the directory. So reset it
                                    os.utime(cName, (postStat[ST_ATIME],
                                                     prevStat[ST_MTIME]))
                            else:
                                data["hgInfo"] = "<no .hg directory>"
                        if len(customData) > 0 or self.opts.hostname:
                            fn = None          # pickle file to read custom data from
                            pickleFile = None
                            if useSolverInData:
                                data["solver"] = "none found"
                                # try to find the oldest pickled file
                                for f in [
                                        "pickledData",
                                        "pickledUnfinishedData",
                                        "pickledStartData"
                                ]:
                                    dirAndTime = []
                                    for g in glob(
                                            path.join(cName, "*.analyzed")):
                                        pName = path.join(g, f)
                                        base = path.basename(g)
                                        if base.find("PyFoamRunner.") == 0:
                                            # Solver name is embedded in the
                                            # directory name PyFoamRunner.<solver>.analyzed
                                            solverName = base[
                                                len("PyFoamRunner."
                                                    ):-len(".analyzed")]
                                        else:
                                            solverName = None
                                        if path.exists(pName):
                                            dirAndTime.append(
                                                (path.getmtime(pName),
                                                 solverName, pName))
                                    # Sort by modification time; the newest
                                    # entry wins
                                    dirAndTime.sort(key=lambda x: x[0])
                                    if len(dirAndTime) > 0:
                                        data["solver"] = dirAndTime[-1][1]
                                        pickleFile = dirAndTime[-1][2]
                                        break
                                solverName = data["solver"]
                            else:
                                solverName = self.opts.solverNameForCustom
                            if pickleFile:
                                fn = pickleFile
                            else:
                                # Look for the pickle file under the
                                # explicitly named solver's .analyzed directory
                                for f in [
                                        "pickledData",
                                        "pickledUnfinishedData",
                                        "pickledStartData"
                                ]:
                                    fp = path.join(
                                        cName,
                                        "PyFoamRunner." + solverName +
                                        ".analyzed", f)
                                    if path.exists(fp):
                                        fn = fp
                                        break
                            if fn:
                                # NOTE(review): the file handle is never closed explicitly
                                raw = pickle.Unpickler(open(fn, "rb")).load()
                                for n, spec in customData:
                                    # Descend along the '::'-separated key path
                                    dt = raw
                                    for k in spec:
                                        try:
                                            dt = dt[k]
                                        except KeyError:
                                            dt = "No key '" + k + "'"
                                            break
                                        if isinstance(dt, string_types):
                                            # A string is a leaf value; stop descending
                                            break
                                    data[n] = dt
                                if self.opts.hostname:
                                    try:
                                        # Short host name (strip the domain part)
                                        data["hostname"] = raw[
                                            "hostname"].split(".")[0]
                                    except KeyError:
                                        data["hostname"] = "<unspecified>"
                            else:
                                for n, spec in customData:
                                    data[n] = "<no file>"
                                if self.opts.hostname:
                                    data["hostname"] = "<no file>"
                        cData.append(data)
                except OSError:
                    print_(cName, "is unreadable")
    if self.opts.progress:
        print_("Sorting data")
    cData.sort(key=lambda x: x[self.opts.sort], reverse=self.opts.reverse)
    if len(cData) == 0:
        print_("No cases found")
        return
    if self.opts.dump:
        # Raw dump of the collected data instead of the formatted table
        print_(cData)
        return
    # Compute the column widths: start with the key names themselves ...
    lens = {}
    for k in list(cData[0].keys()):
        lens[k] = len(k)
    for c in cData:
        # Convert the time-stamp columns to something human-readable
        for k in ["mtime", "lastOutput", "startedAt", "endTimeEstimate"]:
            try:
                if c[k] != None:
                    if self.opts.relativeTime:
                        # 'long' implies Python-2 compatibility (or a compat shim)
                        c[k] = datetime.timedelta(
                            seconds=long(time.time() - c[k]))
                    else:
                        c[k] = time.asctime(time.localtime(c[k]))
            except KeyError:
                pass
            except TypeError:
                # Value was a string like "nix"; blank it out
                c[k] = None
        try:
            c["diskusage"] = humanReadableSize(c["diskusage"])
        except KeyError:
            pass
        # ... then widen each column to its longest value
        for k, v in iteritems(c):
            lens[k] = max(lens[k], len(str(v)))
    # Build a %-style format string from a spec list that mixes column
    # names (looked up in the data dicts) with literal separator strings
    format = ""
    spec = ["mtime", " | "]
    if self.opts.hostname:
        spec += ["hostname", " | "]
    spec += ["first", " - ", "last", " (", "nrSteps", ") "]
    if self.opts.parallel:
        spec += [
            "| ", "procs", " : ", "pFirst", " - ", "pLast", " (",
            "nrParallel", ") | "
        ]
    if self.opts.diskusage:
        spec += ["diskusage", " | "]
    if self.hasState:
        spec += ["nowTime", " s ", "state", " | "]
        if self.opts.advancedState:
            spec += ["lastOutput", " | ", "startedAt", " | "]
    if self.opts.estimateEndTime:
        if not self.opts.advancedState:
            spec += ["startedAt", " | "]
        spec += ["endTimeEstimate", " | "]
    if self.opts.startEndTime:
        spec += ["startTime", " | ", "endTime", " | "]
    if useSolverInData:
        spec += ["solver", " | "]
    for n, s in customData:
        spec += [n, " | "]
    if self.opts.hgInfo:
        spec += ["hgInfo", " | "]
    spec += ["name"]
    for i, l in enumerate(spec):
        if not l in list(cData[0].keys()):
            # Literal separator: goes into the format string verbatim
            format += l
        else:
            # Column: right-padded to the computed width (last column unpadded)
            if i < len(spec) - 1:
                format += "%%(%s)%ds" % (l, lens[l])
            else:
                format += "%%(%s)s" % (l)
    if self.opts.progress:
        print_("Printing\n\n")
    # Header row: format the column names with themselves
    header = format % dict(
        list(zip(list(cData[0].keys()), list(cData[0].keys()))))
    print_(header)
    print_("-" * len(header))
    for d in cData:
        # Stringify everything so the %d/%s widths apply uniformly
        for k in list(d.keys()):
            d[k] = str(d[k])
        print_(format % d)
    if self.opts.diskusage:
        print_("Total disk-usage:", humanReadableSize(totalDiskusage))
def test_with_moga(self): """ moga test with pyopt_driver. create meta and optimization with nsga2. """ if not foamVersionNumber() in [(2,3),(2,2)]: raise unittest.SkipTest("need ver.2.3 or 2.2 for this unittest.") cavityTut = os.path.join(foamTutorials(), "incompressible/icoFoam/cavity") if not os.path.exists(cavityTut): raise unittest.SkipTest("need $FOAM_TUTORIALS/incompressible/cavity \ for unittest.") try: shutil.copytree(cavityTut, os.path.join(self.tmpDir,"cavity")) cavityCase = SolutionDirectory(os.path.join(self.tmpDir,"cavity")) except: raise unittest.SkipTest("can not copy cavity case to temp_dir.") #create Allrun with open(os.path.join(cavityCase.name,"Allrun"),'w') as fp: fp.write('#!/bin/sh\nblockMesh>log.blockMesh\nicoFoam>log.icoFoam\n') os.chmod(os.path.join(cavityCase.name,"Allrun"),0777) #append controlDict fObj=""" functions { probes { type probes; functionObjectLibs ("libsampling.so"); enabled true; outputControl timeStep; outputInterval 1; fields ( p U ); probeLocations ( ( 0.015 0.015 0.005 ) ( 0.085 0.015 0.005 ) ); } } """ with open(cavityCase.controlDict(),'a') as fp: fp.write(fObj) #test start sim = set_as_top(MultiObjectiveCavity()) sim.DeformationCavity_meta.model.case_dir = cavityCase.name sim.DeformationCavity.case_dir = cavityCase.name try: sim.NSGA2.optimizer = 'NSGA2' except ValueError: raise SkipTest("NSGA2 not present on this system") sim.NSGA2.options['PrintOut'] = 0 sim.run()