def _readParameter(self, name):
    """Look a single parameter up in the case's controlDict.

    :param name: the parameter
    :return: The value"""
    control_dict = ParameterFile(
        self._master.getSolutionDirectory().controlDict())
    value = control_dict.readParameter(name)
    return value
def _writeStopAt(self, value, message):
    """Write ``stopAt`` into the controlDict so that the run ends gracefully.

    The controlDict is opened with ``backup=True`` so that the original
    can be restored once the run has stopped.

    @param value: value to write into the ``stopAt`` entry
    @param message: warning text shown to the user"""
    if not self.stopMe:
        self.stopMe = True
        if not self.isRestarted:
            if self.controlDict:
                # A ParameterFile object already exists (its backup will be
                # clobbered by the new one below)
                warning("The controlDict has already been modified. Restoring will be problematic")
            self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"),
                                             backup=True)
        self.controlDict.replaceParameter("stopAt", value)
        warning(message)
def stopGracefully(self):
    """Tells the runner to stop at the next convenient time by setting
    ``stopAt writeNow`` in the controlDict (backed up for later restore)."""
    if not self.stopMe:
        self.stopMe = True
        if not self.isRestarted:
            if self.controlDict:
                # An earlier modification already created a ParameterFile;
                # replacing it loses that backup
                warning("The controlDict has already been modified. Restoring will be problematic")
            self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"),
                                             backup=True)
        self.controlDict.replaceParameter("stopAt", "writeNow")
        warning("Stopping run at next write")
def writeResults(self):
    """Ask the solver to write fields at the next possible time-step by
    switching the controlDict to ``writeControl timeStep`` /
    ``writeInterval 1`` (controlDict backed up for later restore)."""
    if not self.writeRequested:
        if not self.isRestarted:
            if self.controlDict:
                # An earlier modification already created a ParameterFile;
                # replacing it loses that backup
                warning("The controlDict has already been modified. Restoring will be problematic")
            self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"),
                                             backup=True)
        self.controlDict.replaceParameter("writeControl", "timeStep")
        self.controlDict.replaceParameter("writeInterval", "1")
        self.writeRequested = True
def testParameterFileWrite(self):
    # replace startTime, verify the change, then restore the backup
    control = ParameterFile(path.join(self.dest, "system", "controlDict"),
                            backup=True)
    self.assertEqual(control.readParameter("startTime"), "0")
    control.replaceParameter("startTime", "42")
    self.assertEqual(control.readParameter("startTime"), "42")
    control.restore()
    self.assertEqual(control.readParameter("startTime"), "0")
def testParameterFileWrite(self):
    # round-trip: modify startTime, check it, restore, check again
    pfile = ParameterFile(path.join(self.dest, "system", "controlDict"), backup=True)
    self.assertEqual("0", pfile.readParameter("startTime"))
    pfile.replaceParameter("startTime", "42")
    self.assertEqual("42", pfile.readParameter("startTime"))
    pfile.restore()
    self.assertEqual("0", pfile.readParameter("startTime"))
def timeChanged(self):
    # Callback fired whenever the log analyzer reports a new simulation time.
    # Updates step/progress bookkeeping and lazily discovers start time,
    # end time and case/solver names from the available analyzers.
    self.nSteps += 1
    self.currTime = self.analyzer.time
    self.progressString = self.analyzer.progressOut.lastProgress()
    if self.analyzer.hasAnalyzer("Execution"):
        self.clockTime = self.analyzer.getAnalyzer(
            "Execution").clockTotal()
    if self.startTime is None:
        # prefer the runner's notion of the creation time if one is attached
        if self.runner:
            self.startTime = self.runner.createTime
        else:
            self.startTime = self.analyzer.getAnalyzer("Time").createTime()
    if self.endTime is None:
        # try to locate the solution directory so endTime can be read
        # from the controlDict
        sol = None
        if self.runner:
            sol = self.runner.getSolutionDirectory()
        else:
            if self.analyzer.hasAnalyzer("ExecName"):
                caseName = self.analyzer.getAnalyzer("ExecName").caseName
                if caseName and path.isdir(caseName):
                    from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
                    sol = SolutionDirectory(caseName, paraviewLink=False)
        if sol:
            from PyFoam.RunDictionary.ParameterFile import ParameterFile
            control = ParameterFile(sol.controlDict())
            try:
                self.endTime = float(control.readParameter("endTime"))
            except ValueError:
                # endTime entry not a plain number -> mark as unknown
                self.endTime = -1
    if self.caseName is None or self.execName is None:
        if self.analyzer.hasAnalyzer("ExecName"):
            self.caseName = self.analyzer.getAnalyzer("ExecName").caseName
            self.execName = self.analyzer.getAnalyzer("ExecName").execName
            self.headerChanged = True
            # register all known log-line regexps with the display
            # NOTE(review): placement of this registration inside the
            # ExecName branch reconstructed from collapsed source - confirm
            from PyFoam.LogAnalysis.LogLineAnalyzer import LogLineAnalyzer
            for e in LogLineAnalyzer.allRegexp:
                addExpr(e)
    if self.firstTime:
        self.update(resize=True)
        self.firstTime = False
    else:
        self._checkHeaders(force=True)
def timeChanged(self):
    # Called on every new simulation time seen in the log: refresh
    # counters/progress and lazily fill in start/end time and names.
    self.nSteps+=1
    self.currTime=self.analyzer.time
    self.progressString=self.analyzer.progressOut.lastProgress()
    if self.analyzer.hasAnalyzer("Execution"):
        self.clockTime=self.analyzer.getAnalyzer("Execution").clockTotal()
    if self.startTime is None:
        # runner knows the creation time; otherwise ask the Time analyzer
        if self.runner:
            self.startTime=self.runner.createTime
        else:
            self.startTime=self.analyzer.getAnalyzer("Time").createTime()
    if self.endTime is None:
        # find a solution directory so endTime can be read from controlDict
        sol=None
        if self.runner:
            sol=self.runner.getSolutionDirectory()
        else:
            if self.analyzer.hasAnalyzer("ExecName"):
                caseName=self.analyzer.getAnalyzer("ExecName").caseName
                if caseName and path.isdir(caseName):
                    from PyFoam.RunDictionary.SolutionDirectory import SolutionDirectory
                    sol=SolutionDirectory(caseName,paraviewLink=False)
        if sol:
            from PyFoam.RunDictionary.ParameterFile import ParameterFile
            control=ParameterFile(sol.controlDict())
            try:
                self.endTime=float(control.readParameter("endTime"))
            except ValueError:
                # non-numeric endTime entry -> flag as unknown
                self.endTime=-1
    if self.caseName is None or self.execName is None:
        if self.analyzer.hasAnalyzer("ExecName"):
            self.caseName=self.analyzer.getAnalyzer("ExecName").caseName
            self.execName=self.analyzer.getAnalyzer("ExecName").execName
            self.headerChanged=True
            # register known log-line regexps
            # NOTE(review): nesting reconstructed from collapsed source - confirm
            from PyFoam.LogAnalysis.LogLineAnalyzer import LogLineAnalyzer
            for e in LogLineAnalyzer.allRegexp:
                addExpr(e)
    if self.firstTime:
        self.update(resize=True)
        self.firstTime=False
    else:
        self._checkHeaders(force=True)
def modFieldBcType(self, fieldPath, patch, newBcType):
    """Replace the boundary-condition block of *patch* in a field file.

    Exits the process when the patch cannot be found.

    :param fieldPath: path of the OpenFOAM field file
    :param patch: name of the patch whose block is rewritten
    :param newBcType: replacement text for the block interior"""
    fieldFile = ParameterFile(fieldPath)
    # original called readFile() twice; once is enough
    fieldFile.readFile()
    # NOTE: `patch` is interpolated unescaped; a patch name containing
    # regex metacharacters would alter the match semantics
    exp = re.compile(patch + r".*?\{(.*?)\}", re.DOTALL)
    [newStr, num] = exp.subn("%s\n {\n%s\n }\n" % (patch, newBcType),
                             fieldFile.content)
    if num == 0:
        # fixed missing space ("Patch: Xnot found" -> "Patch: X not found")
        self.logger.error("Patch: " + patch + " not found in " + fieldFile.name +
                          " could not modify bc")
        sys.exit(1)
    else:
        fieldFile.content = newStr
        fieldFile.writeFile()
def readFieldBcType(self, fieldPath, patch):
    """Return the boundary-condition ``type`` entry of *patch* in a field file.

    Exits the process if the type cannot be extracted.

    :param fieldPath: path of the OpenFOAM field file
    :param patch: name of the patch to look up
    :return: the patch's ``type`` value, stripped of whitespace"""
    fieldFile = ParameterFile(fieldPath)
    fieldFile.readFile()
    exp = re.compile(
        "(" + patch + r"\s*?\n\s*?\{.*?type)(\s*?)(.*?)(;.*?\})", re.DOTALL)
    bcMatch = exp.search(fieldFile.content)
    if bcMatch is None:
        self.logger.debug("Could not find patch: " + patch + " in file: " +
                          fieldFile.name)
    try:
        patchType = bcMatch.group(3)
    except AttributeError:
        # was a bare `except:`; the only failure mode here is bcMatch
        # being None (no match), which raises AttributeError on .group
        self.logger.error("Could not get patch type from file: " +
                          fieldFile.name + " check the file!")
        sys.exit(1)
    return patchType.strip()
def checkRestart(data=None):
    # Decide whether another automatic restart is worthwhile.
    # Returns a reason-string when NO further restart should happen,
    # and falls through (implicit None) when a restart still makes sense.
    # Uses `sol`, `self` and `args` from the enclosing scope.
    lastTimeName = sol.getLast()
    lastTime = float(lastTimeName)
    ctrlDict = ParameterFile(sol.controlDict(), backup=False)
    endTime = float(ctrlDict.readParameter("endTime"))
    # relative closeness check; assumes endTime != 0 - TODO confirm
    if abs(endTime - lastTime) / endTime < 1e-5:
        return "Reached endTime {}".format(endTime)
    logfile = calcLogname(self.opts.logname, args)
    isRestart, restartnr, restartName, lastlog = findRestartFiles(
        logfile, sol)
    # TODO: look into the logfile
    if self.lastWrittenTime is not None:
        # a previous restart that wrote the same last time made no progress
        if self.lastWrittenTime == lastTimeName:
            return "Last restart didn't improve on {}. Further restarts make no sense".format(
                lastTime)
    self.lastWrittenTime = lastTimeName
    if data:
        # too few time-steps in the last run also means no further restart
        if "stepNr" in data and data["stepNr"] < self.opts.minimumSteps:
            return "Only {} steps done while {} are required".format(
                data["stepNr"], self.opts.minimumSteps)
def findRestartFiles(logfile,sol):
    """Determine whether a run is a restart and pick the next restart-log name.

    A restart is assumed when the logfile (possibly gzipped) already exists,
    the case's controlDict has ``startFrom latestTime`` and the case contains
    more than one time directory.

    @param logfile: base name of the solver logfile
    @param sol: the solution directory of the case
    @return: tuple (isRestart,restartnr,restartName,lastlog)"""
    isRestart=False
    restartnr=None
    restartName=None
    lastlog=None
    if path.exists(logfile) or path.exists(logfile+".gz"):
        # path.join() with a single argument was redundant; use the path directly
        ctrlDict=ParameterFile(sol.controlDict(),backup=False)
        if ctrlDict.readParameter("startFrom")=="latestTime":
            if len(sol)>1:
                isRestart=True
                restartnr=0
                restartName=logfile+".restart00"
                lastlog=logfile
                # advance to the first unused .restartNN name
                while path.exists(restartName) or path.exists(restartName+".gz"):
                    restartnr+=1
                    lastlog=restartName
                    restartName=logfile+".restart%02d" % restartnr
    return isRestart,restartnr,restartName,lastlog
def findRestartFiles(logfile, sol):
    """Determine whether a run is a restart and pick the next restart-log name.

    A restart is assumed when the logfile (possibly gzipped) already exists,
    the case's controlDict has ``startFrom latestTime`` and the case contains
    more than one time directory.

    @param logfile: base name of the solver logfile
    @param sol: the solution directory of the case
    @return: tuple (isRestart, restartnr, restartName, lastlog)"""
    isRestart = False
    restartnr = None
    restartName = None
    lastlog = None
    if path.exists(logfile) or path.exists(logfile + ".gz"):
        # path.join() with a single argument was redundant; use the path directly
        ctrlDict = ParameterFile(sol.controlDict(), backup=False)
        if ctrlDict.readParameter("startFrom") == "latestTime":
            if len(sol) > 1:
                isRestart = True
                restartnr = 0
                restartName = logfile + ".restart00"
                lastlog = logfile
                # advance to the first unused .restartNN name
                while path.exists(restartName) or path.exists(restartName + ".gz"):
                    restartnr += 1
                    lastlog = restartName
                    restartName = logfile + ".restart%02d" % restartnr
    return isRestart, restartnr, restartName, lastlog
def readFieldBcType(self, fieldPath, patch):
    """Return the boundary-condition ``type`` entry of *patch* in a field file.

    Exits the process if the type cannot be extracted.

    :param fieldPath: path of the OpenFOAM field file
    :param patch: name of the patch to look up
    :return: the patch's ``type`` value, stripped of whitespace"""
    fieldFile = ParameterFile(fieldPath)
    fieldFile.readFile()
    exp = re.compile(
        "(" + patch + r"\s*?\n\s*?\{.*?type)(\s*?)(.*?)(;.*?\})", re.DOTALL
    )
    bcMatch = exp.search(fieldFile.content)
    if bcMatch is None:
        self.logger.debug(
            "Could not find patch: " + patch + " in file: " + fieldFile.name
        )
    try:
        patchType = bcMatch.group(3)
    except AttributeError:
        # was a bare `except:`; the only failure mode here is bcMatch
        # being None (no match), which raises AttributeError on .group
        self.logger.error(
            "Could not get patch type from file: " + fieldFile.name +
            " check the file!"
        )
        sys.exit(1)
    return patchType.strip()
def modFieldBcType(self, fieldPath, patch, newBcType):
    """Replace the boundary-condition block of *patch* in a field file.

    Exits the process when the patch cannot be found.

    :param fieldPath: path of the OpenFOAM field file
    :param patch: name of the patch whose block is rewritten
    :param newBcType: replacement text for the block interior"""
    fieldFile = ParameterFile(fieldPath)
    # original called readFile() twice; once is enough
    fieldFile.readFile()
    # NOTE: `patch` is interpolated unescaped; a patch name containing
    # regex metacharacters would alter the match semantics
    exp = re.compile(patch + r".*?\{(.*?)\}", re.DOTALL)
    [newStr, num] = exp.subn(
        "%s\n {\n%s\n }\n" % (patch, newBcType), fieldFile.content
    )
    if num == 0:
        # fixed missing space ("Patch: Xnot found" -> "Patch: X not found")
        self.logger.error(
            "Patch: " + patch + " not found in " + fieldFile.name +
            " could not modify bc"
        )
        sys.exit(1)
    else:
        fieldFile.content = newStr
        fieldFile.writeFile()
def main():
    # Batch driver (Python 2): runs a wind-field solver for every combination
    # of wind speed and wind direction listed in a control file, archiving
    # the resulting fields and (optionally) VTK exports.
    parser = OptionParser(usage=usage, version=version)
    parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors")
    parser.add_option("-t", "--template", action="store", dest="controlfile", default=None, help="Generate default controlfile")
    parser.add_option("-l", "--logfile", action="store", dest="logfile", default=None, help="Writes output to logfile")
    parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Writes output to logfile")
    parser.add_option("-c", "--case", action="store", dest="case", default=None, help="Specifies case directory")
    (options, args) = parser.parse_args()
    # logging setup: console handler unless a logfile was requested
    rootLogger = logging.getLogger('')
    logger = logging.getLogger('canopyRunner')
    reportLevel = logging.INFO
    if options.quiet:
        reportLevel = logging.WARNING
    if options.debug:
        reportLevel = logging.DEBUG
    rootLogger.setLevel(reportLevel)
    if options.logfile == None:
        console = logging.StreamHandler()
        console.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        rootLogger.addHandler(console)
    if options.controlfile != None:
        # template mode: write a default controlfile and exit
        generateCf(path.abspath(options.controlfile))
        print "Wrote default controlfile"
        sys.exit()
    if options.logfile != None:
        logFileName = path.abspath(options.logfile)
        if not path.exists(path.dirname(logFileName)):
            print "Bad argument, directory for logfile does not exist"
            sys.exit()
        logfile = logging.FileHandler(logFileName, "w")
        logfile.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        logfile.setFormatter(formatter)
        rootLogger.addHandler(logfile)
    if len(args) != 1:
        parser.error("Incorrect number of arguments")
    # read run configuration from the controlfile
    cf = ControlFile.ControlFile(fileName=path.abspath(args[0]))
    if options.case != None:
        casePath = path.abspath(options.case)
    else:
        casePath = os.getcwd()
    caseName = path.basename(casePath)
    ch = CaseHandler.CaseHandler(casePath)
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    inletProfile_z0 = cf.findScalar("z0:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    archiveDirName = cf.findString("flowArchiveDirName:", optional=False)
    restoreArchived = cf.findBoolean("restoreArchived:", optional=True, default=False)
    archiveVTK = cf.findBoolean("archiveVTK:", optional=False)
    VTKArchiveDir = cf.findExistingPath("VTKArchiveDir:", optional=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs == None:
        # default: assume 8 CPUs per node
        nprocesses = 8 * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="windFoam")
    softStart = cf.findString("softstart_application:", optional=True)
    initCmds = cf.findStringList("initialize:", default=["setWindInlet"])
    flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(casePath)
    # log a summary of the run matrix
    logger.info("Running windRunner.py")
    logger.info("Setup overview:")
    logger.info(25 * "-")
    logger.info("Case: " + caseName)
    logger.info(25 * "-")
    logger.info("Wind directions are: " + str(wdirs))
    logger.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    logger.info("Total number of runs: " + str(nruns))
    logger.info(25 * "-")
    logger.info("Number of iterations are: " + str(iterations))
    logger.info("Number of nodes are: " + str(nodes))
    logger.info("Fields to be archived: " + str(fieldsToArchive))
    logger.info("ArchiveToVTK is set to: " + str(archiveVTK))
    logger.info(50 * "=")
    controlDict = ParameterFile(ch.controlDict())
    windDict = ParameterFile(path.join(ch.constantDir(), "windDict"))
    RASDict = ParameterFile(path.join(ch.constantDir(), "RASProperties"))
    compression = controlDict.readParameter("writeCompression")
    if compression == "compressed" or compression == "on":
        # results on disk will carry a .gz suffix
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
    else:
        filesToArchive = fieldsToArchive
    if not path.exists(VTKArchiveDir) and archiveVTK:
        logger.error("The VTKArchiveDir does not exist")
        sys.exit()
    #booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")
    # crude time-accounting: 20 s per iteration assumed
    timeLeft = iterations * nruns * 20
    # NOTE(review): `05` is an octal literal (== 5); probably 0 was intended
    timeSpent = 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    ch.backUpInitialFields()
    logger.info("Backup made of initial fields")
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            controlDict.replaceParameter("writeInterval", str(iterations))
            logger.info("Running calculations for dir: " + str(wdir) + " speed: " + str(wspeed))
            logger.info("Time left: " + str(timeLeft / 60.0) + "min, Time spent: " + str(timeSpent / 60.0) + "min")
            logger.info("Estimated time for finish: " + str(timeEstimated[:4]))
            logger.info("Cases finished: " + str(casesRun) + " cases left: " + str(casesLeft))
            logger.info(" ")
            ch.clearResults()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            logger.info("restoreArchived = " + str(restoreArchived))
            if restoreArchived and flowArchive.inArchive(dirName=dirName):
                # reuse previously archived fields as initial condition
                logger.info("Restoring archived flow fields")
                flowArchive.restore(dirName, fieldsToArchive, ch.initialDir())
                for filename in fieldsToArchive:
                    flowArchive.getFile(outputFile=path.join(
                        ch.initialDir(), filename),
                        fileName=filename,
                        archiveDirName=dirName)
                logger.info("Restored archived flow fields!")
            else:
                # set up boundary conditions and inlet profiles from scratch
                logger.info("...Modifying bc:s")
                ch.modWindDir(ch.initialDir(), wdir)
                logger.info("bc:s modified!")
                logger.info("...Setting inlet profiles")
                windDict.replaceParameter("U10", str(wspeed))
                windDict.replaceParameter("windDirection", str(wdir))
                windDict.replaceParameter("z0", str(inletProfile_z0))
                for initCmd in initCmds:
                    initUtil = UtilityRunner(argv=[initCmd, "-case", casePath],
                                             silent=True,
                                             logname=initCmd)
                    initUtil.start()
                    if initUtil.runOK():
                        logger.info("Successfully finished: %s" % initCmd)
                    else:
                        logger.error("Error when running: %s" % initCmd)
                        sys.exit()
            if nprocesses > 1:
                # parallel run: decompose the case onto the LAM machine
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", casePath],
                        silent=True, logname="decomposePar")
                    logger.info("...Decomposing case to run on" + str(Lam.cpuNr()) + str(" of processors"))
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        logger.info("Case decomposed!")
                    else:
                        logger.error("Error when running decomposePar")
                        sys.exit()
                else:
                    logger.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                logger.info("Serial Run chosen!")
            if softStart != None:
                # soft start: a short kEpsilon run before the canopy model
                RASDict.replaceParameter("RASModel", "kEpsilon")
                controlDict.replaceParameter("stopAt", "nextWrite")
                controlDict.replaceParameter("writeInterval", "50")
                logger.info("...Softstarting using " + softStart)
                windFoamSolver = ConvergenceRunner(
                    StandardLogAnalyzer(),
                    argv=[softStart, "-case", casePath],
                    silent=True, lam=Lam, logname=softStart)
                windFoamSolver.start()
                if windFoamSolver.runOK():
                    logger.info("Iterations finished for windFoam")
                else:
                    logger.error("Error while running windFoam")
                    sys.exit()
                RASDict.replaceParameter("RASModel", "kEpsilon_canopy")
                controlDict.replaceParameter("writeInterval", str(iterations))
                # The following line is to copy the landuse and LAD-files after the first iterations with simpleFoam
                ch.execute(
                    "for file in " + os.path.join(casePath, "processor*/0/[Ll]*") +
                    r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done'
                )
            logger.info("...Running solver for wind field")
            windFoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(),
                argv=[solver, "-case", casePath],
                silent=True, lam=Lam, logname=solver)
            windFoamSolver.start()
            if windFoamSolver.runOK():
                logger.info("Iterations finished for windFoam")
            else:
                logger.error("Error while running windFoam")
                sys.exit()
            if nprocesses > 1:
                # parallel run: reconstruct and drop the processor directories
                logger.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-latestTime", "-case", casePath],
                    silent=True, logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    logger.info("recunstruction ready!")
                else:
                    logger.error("Error while running recontructPar")
                    sys.exit()
                logger.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(casePath, "processor*"))
                logger.info("Removed decomposed mesh!")
            # record residuals and probes for this case
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Ux", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Uy", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_k", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "U", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "k", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "p", casesRun + 1)
            logger.info("Archiving results from directory: %s" % ch.latestDir())
            #save latest concentration result files
            solFiles = [
                file for file in os.listdir(ch.latestDir())
                if file in filesToArchive
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                flowArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
            if archiveVTK:
                #Creating a temporary last time directory to be used by foamToVTK
                VTKTime = str(eval(path.basename(ch.latestDir())) + 1)
                newTimeDir = path.join(casePath, VTKTime)
                os.mkdir(newTimeDir)
                for filename in solFiles:
                    oldFile = path.join(casePath, str(eval(ch.getLast()) - 1), filename)
                    ch.execute("cp " + oldFile + " " + newTimeDir + "/")
                foamToVTKUtil = UtilityRunner(
                    argv=["foamToVTK", "-case", casePath, "-time " + VTKTime],
                    silent=True, logname="foamToVTK")
                foamToVTKUtil.start()
                if foamToVTKUtil.runOK():
                    ch.execute("mv " + path.join(casePath, "VTK") + " " + path.join(
                        VTKArchiveDir,
                        "VTK" + "_wspeed_" + str(wspeed) + "_wdir_" + str(wdir)))
                    ch.execute("rm -r " + path.join(casePath, VTKTime))
                    logger.info("Exported to VTK archive!")
                else:
                    logger.error("Error when exporting to VTK")
                    sys.exit()
            logger.info("Finished wdir: " + str(wdir) + " wspeed: " + str(wspeed) + "Last iter = " + ch.getLast())
            logger.info(" ")
            # update time-accounting with the measured duration of this case
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)
            ch.clearResults()
            logger.info("Cleared all result directories exept: %s" % (" ".join(ch.getTimes())))
    # NOTE(review): placement of the following clean-up after both loops is
    # reconstructed from collapsed source - confirm against the original
    ch.restoreInitialFields()
    logger.info("Restored initital fields from backup copy")
    #restoring windData dictionary to original state
    windDict.purgeFile()
    convTable.writeProbes()
    convTable.writeResiduals()
    logger.info(
        "Residuals and probes from solver windFoam written to case/convergence directory"
    )
    #Restoring controlDict to original state
    controlDict.purgeFile()
    logger.info("Finished batch calculation!")
def _readParameter(self,name):
    """Fetch the value of a parameter from the case's controlDict
    @param name: the parameter
    @return: The value"""
    ctrl=ParameterFile(self._master.getSolutionDirectory().controlDict())
    return ctrl.readParameter(name)
def __init__(self,
             argv=None,
             silent=False,
             logname=None,
             compressLog=False,
             lam=None,
             server=False,
             restart=False,
             noLog=False,
             logTail=None,
             remark=None,
             jobId=None,
             parameters=None,
             writeState=True,
             echoCommandLine=None):
    """Set up (but do not start) a solver/utility run.

    @param argv: list with the tokens that are the command line
    if not set the standard command line is used
    @param silent: if True no output is sent to stdout
    @param logname: name of the logfile
    @param compressLog: Compress the logfile into a gzip
    @param lam: Information about a parallel run
    @param server: Whether or not to start the network-server
    @type lam: PyFoam.Execution.ParallelExecution.LAMMachine
    @param restart: restart the run from the latest time
    @param noLog: Don't output a log file
    @param logTail: only the last lines of the log should be written
    @param remark: User defined remark about the job
    @param parameters: User defined dictionary with parameters for
    documentation purposes
    @param jobId: Job ID of the controlling system (Queueing system)
    @param writeState: Write the state to some files in the case
    @param echoCommandLine: Prefix that is printed with the command line.
    If unset nothing is printed"""
    if sys.version_info < (2,3):
        # Python 2.2 does not have the capabilities for the Server-Thread
        if server:
            warning("Can not start server-process because Python-Version is too old")
            server=False
    if argv==None:
        self.argv=sys.argv[1:]
    else:
        self.argv=argv
    # determine the case directory from the arguments
    if oldApp():
        # old-style OpenFOAM: <cmd> <dir> <case>
        self.dir=path.join(self.argv[1],self.argv[2])
        if self.argv[2][-1]==path.sep:
            self.argv[2]=self.argv[2][:-1]
    else:
        self.dir=path.curdir
        if "-case" in self.argv:
            self.dir=self.argv[self.argv.index("-case")+1]
    if logname==None:
        logname="PyFoam."+path.basename(argv[0])
    try:
        sol=self.getSolutionDirectory()
    except OSError:
        e = sys.exc_info()[1] # compatible with 2.x and 3.x
        error("Solution directory",self.dir,"does not exist. No use running. Problem:",e)
    self.echoCommandLine=echoCommandLine
    self.silent=silent
    self.lam=lam
    self.origArgv=self.argv
    self.writeState=writeState
    self.__lastLastSeenWrite=0
    self.__lastNowTimeWrite=0
    if self.lam!=None:
        # wrap the command line in an mpirun invocation
        self.argv=lam.buildMPIrun(self.argv)
        if config().getdebug("ParallelExecution"):
            debug("Command line:"," ".join(self.argv))
    self.cmd=" ".join(self.argv)
    foamLogger().info("Starting: "+self.cmd+" in "+path.abspath(path.curdir))
    self.logFile=path.join(self.dir,logname+".logfile")
    self.noLog=noLog
    self.logTail=logTail
    if self.logTail:
        # logTail implies that no full log is kept
        if self.noLog:
            warning("Log tail",self.logTail,"and no-log specified. Using logTail")
        self.noLog=True
        self.lastLines=[]
    self.compressLog=compressLog
    if self.compressLog:
        self.logFile+=".gz"
    self.fatalError=False
    self.fatalFPE=False
    self.fatalStackdump=False
    self.warnings=0
    self.started=False
    self.isRestarted=False
    if restart:
        # restart: make the solver continue from the latest time
        self.controlDict=ParameterFile(path.join(self.dir,"system","controlDict"),backup=True)
        self.controlDict.replaceParameter("startFrom","latestTime")
        self.isRestarted=True
    else:
        self.controlDict=None
    self.run=FoamThread(self.cmd,self)
    self.server=None
    if server:
        self.server=FoamServer(run=self.run,master=self)
        self.server.setDaemon(True)
        self.server.start()
        try:
            IP,PID,Port=self.server.info()
            f=open(path.join(self.dir,"PyFoamServer.info"),"w")
            print_(IP,PID,Port,file=f)
            f.close()
        except AttributeError:
            warning("There seems to be a problem with starting the server:",self.server,"with attributes",dir(self.server))
            self.server=None
    self.createTime=None
    self.nowTime=None
    self.startTimestamp=time()
    self.stopMe=False
    self.writeRequested=False
    self.endTriggers=[]
    self.lastLogLineSeen=None
    self.lastTimeStepSeen=None
    self.remark=remark
    self.jobId=jobId
    # collected run meta-data (filled in further during the run)
    self.data={"lines":0}
    # self.data={"lines":0L}
    self.data["logfile"]=self.logFile
    self.data["casefullname"]=path.abspath(self.dir)
    self.data["casename"]=path.basename(path.abspath(self.dir))
    self.data["solver"]=path.basename(self.argv[0])
    self.data["solverFull"]=self.argv[0]
    self.data["commandLine"]=self.cmd
    self.data["hostname"]=uname()[1]
    if remark:
        self.data["remark"]=remark
    else:
        self.data["remark"]="No remark given"
    if jobId:
        self.data["jobId"]=jobId
    parameterFile=sol.getParametersFromFile()
    if len(parameterFile):
        self.data["parameters"]={}
        for k,v in parameterFile.items():
            self.data["parameters"][k]=makePrimitiveString(v)
    if parameters:
        if "parameters" not in self.data:
            self.data["parameters"]={}
        self.data["parameters"].update(parameters)
    self.data["starttime"]=asctime()
def testParameterReadWithTab(self):
    # values padded with blanks or tabs must be read back stripped
    par=ParameterFile(path.join(self.dest,"system","controlDict"))
    for padded in (" 42","\t 42"):
        par.replaceParameter("startTime",padded)
        self.assertEqual(par.readParameter("startTime"),"42")
class BasicRunner(object):
    """Base class for the running of commands

    When the command is run the output is copied to a LogFile and
    (optionally) standard-out

    The argument list assumes for the first three elements the
    OpenFOAM-convention:

    <cmd> <dir> <case>

    The directory name for outputs is therefor created from <dir> and <case>

    Provides some handle-methods that are to be overloaded for
    additional functionality"""

    def __init__(self,
                 argv=None,
                 silent=False,
                 logname=None,
                 compressLog=False,
                 lam=None,
                 server=False,
                 restart=False,
                 noLog=False,
                 logTail=None,
                 remark=None,
                 jobId=None,
                 parameters=None,
                 writeState=True,
                 echoCommandLine=None):
        """@param argv: list with the tokens that are the command line
        if not set the standard command line is used
        @param silent: if True no output is sent to stdout
        @param logname: name of the logfile
        @param compressLog: Compress the logfile into a gzip
        @param lam: Information about a parallel run
        @param server: Whether or not to start the network-server
        @type lam: PyFoam.Execution.ParallelExecution.LAMMachine
        @param noLog: Don't output a log file
        @param logTail: only the last lines of the log should be written
        @param remark: User defined remark about the job
        @param parameters: User defined dictionary with parameters for
        documentation purposes
        @param jobId: Job ID of the controlling system (Queueing system)
        @param writeState: Write the state to some files in the case
        @param echoCommandLine: Prefix that is printed with the command line.
        If unset nothing is printed"""
        if sys.version_info < (2,3):
            # Python 2.2 does not have the capabilities for the Server-Thread
            if server:
                warning("Can not start server-process because Python-Version is too old")
                server=False
        if argv==None:
            self.argv=sys.argv[1:]
        else:
            self.argv=argv
        # determine the case directory from the arguments
        if oldApp():
            self.dir=path.join(self.argv[1],self.argv[2])
            if self.argv[2][-1]==path.sep:
                self.argv[2]=self.argv[2][:-1]
        else:
            self.dir=path.curdir
            if "-case" in self.argv:
                self.dir=self.argv[self.argv.index("-case")+1]
        if logname==None:
            logname="PyFoam."+path.basename(argv[0])
        try:
            sol=self.getSolutionDirectory()
        except OSError:
            e = sys.exc_info()[1] # compatible with 2.x and 3.x
            error("Solution directory",self.dir,"does not exist. No use running. Problem:",e)
        self.echoCommandLine=echoCommandLine
        self.silent=silent
        self.lam=lam
        self.origArgv=self.argv
        self.writeState=writeState
        self.__lastLastSeenWrite=0
        self.__lastNowTimeWrite=0
        if self.lam!=None:
            # wrap the command line in an mpirun invocation
            self.argv=lam.buildMPIrun(self.argv)
            if config().getdebug("ParallelExecution"):
                debug("Command line:"," ".join(self.argv))
        self.cmd=" ".join(self.argv)
        foamLogger().info("Starting: "+self.cmd+" in "+path.abspath(path.curdir))
        self.logFile=path.join(self.dir,logname+".logfile")
        self.noLog=noLog
        self.logTail=logTail
        if self.logTail:
            # logTail implies that no full log is kept
            if self.noLog:
                warning("Log tail",self.logTail,"and no-log specified. Using logTail")
            self.noLog=True
            self.lastLines=[]
        self.compressLog=compressLog
        if self.compressLog:
            self.logFile+=".gz"
        self.fatalError=False
        self.fatalFPE=False
        self.fatalStackdump=False
        self.warnings=0
        self.started=False
        self.isRestarted=False
        if restart:
            # restart: continue from the latest written time
            self.controlDict=ParameterFile(path.join(self.dir,"system","controlDict"),backup=True)
            self.controlDict.replaceParameter("startFrom","latestTime")
            self.isRestarted=True
        else:
            self.controlDict=None
        self.run=FoamThread(self.cmd,self)
        self.server=None
        if server:
            self.server=FoamServer(run=self.run,master=self)
            self.server.setDaemon(True)
            self.server.start()
            try:
                IP,PID,Port=self.server.info()
                f=open(path.join(self.dir,"PyFoamServer.info"),"w")
                print_(IP,PID,Port,file=f)
                f.close()
            except AttributeError:
                warning("There seems to be a problem with starting the server:",self.server,"with attributes",dir(self.server))
                self.server=None
        self.createTime=None
        self.nowTime=None
        self.startTimestamp=time()
        self.stopMe=False
        self.writeRequested=False
        self.endTriggers=[]
        self.lastLogLineSeen=None
        self.lastTimeStepSeen=None
        self.remark=remark
        self.jobId=jobId
        # collected run meta-data (filled in further during the run)
        self.data={"lines":0}
        # self.data={"lines":0L}
        self.data["logfile"]=self.logFile
        self.data["casefullname"]=path.abspath(self.dir)
        self.data["casename"]=path.basename(path.abspath(self.dir))
        self.data["solver"]=path.basename(self.argv[0])
        self.data["solverFull"]=self.argv[0]
        self.data["commandLine"]=self.cmd
        self.data["hostname"]=uname()[1]
        if remark:
            self.data["remark"]=remark
        else:
            self.data["remark"]="No remark given"
        if jobId:
            self.data["jobId"]=jobId
        parameterFile=sol.getParametersFromFile()
        if len(parameterFile):
            self.data["parameters"]={}
            for k,v in parameterFile.items():
                self.data["parameters"][k]=makePrimitiveString(v)
        if parameters:
            if "parameters" not in self.data:
                self.data["parameters"]={}
            self.data["parameters"].update(parameters)
        self.data["starttime"]=asctime()

    def appendTailLine(self,line):
        """Append lines to the tail of the log"""
        if len(self.lastLines)>10*self.logTail:
            # truncate the lines, but not too often
            self.lastLines=self.lastLines[-self.logTail:]
            self.writeTailLog()
        self.lastLines.append(line+"\n")

    def writeTailLog(self):
        """Write the last lines to the log"""
        fh=open(self.logFile,"w")
        if len(self.lastLines)<=self.logTail:
            fh.writelines(self.lastLines)
        else:
            fh.writelines(self.lastLines[-self.logTail:])
        fh.close()

    def start(self):
        """starts the command and stays with it till the end"""
        self.started=True
        if not self.noLog:
            if self.compressLog:
                fh=gzip.open(self.logFile,"w")
            else:
                fh=open(self.logFile,"w")
        self.startHandle()
        self.writeStartTime()
        self.writeTheState("Running")
        check=BasicRunnerCheck()
        if self.echoCommandLine:
            print_(self.echoCommandLine+" "+" ".join(self.argv))
        self.run.start()
        interrupted=False
        totalWarningLines=0
        addLinesToWarning=0
        collectWarnings=True
        # main loop: consume the solver output line by line
        while self.run.check():
            try:
                self.run.read()
                if not self.run.check():
                    break
                line=self.run.getLine()
                if "errorText" in self.data:
                    # a fatal error was seen: keep appending the output
                    self.data["errorText"]+=line+"\n"
                if addLinesToWarning>0:
                    # still collecting the context of a recent warning
                    self.data["warningText"]+=line+"\n"
                    addLinesToWarning-=1
                    totalWarningLines+=1
                    if totalWarningLines>500:
                        collectWarnings=False
                        addLinesToWarning=0
                        self.data["warningText"]+="No more warnings added because limit of 500 lines exceeded"
                self.data["lines"]+=1
                self.lastLogLineSeen=time()
                self.writeLastSeen()
                tmp=check.getTime(line)
                if check.controlDictRead(line):
                    # the solver has re-read the controlDict: schedule the
                    # restoration of the original file
                    if self.writeRequested:
                        duration=config().getfloat("Execution","controlDictRestoreWait",default=30.)
                        warning("Preparing to reset controlDict to old glory in",duration,"seconds")
                        Timer(duration,
                              restoreControlDict,
                              args=[self.controlDict,self]).start()
                        self.writeRequested=False
                if tmp!=None:
                    # a new time-step was reported
                    self.data["time"]=tmp
                    self.nowTime=tmp
                    self.writeTheState("Running",always=False)
                    self.writeNowTime()
                    self.lastTimeStepSeen=time()
                    if self.createTime==None:
                        # necessary because interFoam reports no creation time
                        self.createTime=tmp
                    try:
                        self.data["stepNr"]+=1
                    except KeyError:
                        self.data["stepNr"]=1
                        # =1L
                    self.data["lasttimesteptime"]=asctime()
                tmp=check.getCreateTime(line)
                if tmp!=None:
                    self.createTime=tmp
                if not self.silent:
                    try:
                        print_(line)
                    except IOError:
                        e = sys.exc_info()[1] # compatible with 2.x and 3.x
                        if e.errno!=32:
                            raise e
                        else:
                            # Pipe was broken
                            self.run.interrupt()
                if line.find("FOAM FATAL ERROR")>=0 or line.find("FOAM FATAL IO ERROR")>=0:
                    self.fatalError=True
                    self.data["errorText"]="PyFoam found a Fatal Error "
                    if "time" in self.data:
                        self.data["errorText"]+="at time "+str(self.data["time"])+"\n"
                    else:
                        self.data["errorText"]+="before time started\n"
                    self.data["errorText"]+="\n"+line+"\n"
                if line.find("Foam::sigFpe::sigFpeHandler")>=0:
                    self.fatalFPE=True
                if line.find("Foam::error::printStack")>=0:
                    self.fatalStackdump=True
                if self.fatalError and line!="":
                    foamLogger().error(line)
                if line.find("FOAM Warning")>=0:
                    self.warnings+=1
                    try:
                        self.data["warnings"]+=1
                    except KeyError:
                        self.data["warnings"]=1
                    if collectWarnings:
                        # keep the next 20 lines as warning context
                        addLinesToWarning=20
                        if not "warningText" in self.data:
                            self.data["warningText"]=""
                        else:
                            self.data["warningText"]+=("-"*40)+"\n"
                        self.data["warningText"]+="Warning found by PyFoam on line "
                        self.data["warningText"]+=str(self.data["lines"])+" "
                        if "time" in self.data:
                            self.data["warningText"]+="at time "+str(self.data["time"])+"\n"
                        else:
                            self.data["warningText"]+="before time started\n"
                        self.data["warningText"]+="\n"+line+"\n"
                if self.server!=None:
                    self.server._insertLine(line)
                self.lineHandle(line)
                if not self.noLog:
                    fh.write(line+"\n")
                    fh.flush()
                elif self.logTail:
                    self.appendTailLine(line)
            except KeyboardInterrupt:
                e = sys.exc_info()[1] # compatible with 2.x and 3.x
                foamLogger().warning("Keyboard Interrupt")
                self.run.interrupt()
                self.writeTheState("Interrupted")
                interrupted=True
        # run finished (or interrupted): collect the final statistics
        self.data["interrupted"]=interrupted
        self.data["OK"]=self.runOK()
        self.data["cpuTime"]=self.run.cpuTime()
        self.data["cpuUserTime"]=self.run.cpuUserTime()
        self.data["cpuSystemTime"]=self.run.cpuSystemTime()
        self.data["wallTime"]=self.run.wallTime()
        self.data["usedMemory"]=self.run.usedMemory()
        self.data["endtime"]=asctime()
        self.data["fatalError"]=self.fatalError
        self.data["fatalFPE"]=self.fatalFPE
        self.data["fatalStackdump"]=self.fatalStackdump
        self.writeNowTime(force=True)
        self.stopHandle()
        if not interrupted:
            self.writeTheState("Finished")
        for t in self.endTriggers:
            t()
        if not self.noLog:
            fh.close()
        elif self.logTail:
            self.writeTailLog()
        if self.server!=None:
            self.server.deregister()
            self.server.kill()
        foamLogger().info("Finished")
        return self.data

    def writeToStateFile(self,fName,message):
        """Write a message to a state file"""
        if self.writeState:
            open(path.join(self.dir,"PyFoamState."+fName),"w").write(message+"\n")

    def writeStartTime(self):
        """Write the real time the run was started at"""
        self.writeToStateFile("StartedAt",asctime())

    def writeTheState(self,state,always=True):
        """Write the current state the run is in"""
        if always or (time()-self.__lastLastSeenWrite)>9:
            self.writeToStateFile("TheState",state)

    def writeLastSeen(self):
        # throttled: write at most every 10 seconds
        if (time()-self.__lastLastSeenWrite)>10:
            self.writeToStateFile("LastOutputSeen",asctime())
            self.__lastLastSeenWrite=time()

    def writeNowTime(self,force=False):
        # throttled: write at most every 10 seconds unless forced
        if (time()-self.__lastNowTimeWrite)>10 or force:
            self.writeToStateFile("CurrentTime",str(self.nowTime))
            self.__lastNowTimeWrite=time()

    def runOK(self):
        """checks whether the run was successful"""
        if self.started:
            return not self.fatalError and not self.fatalFPE and not self.fatalStackdump # and self.run.getReturnCode()==0
        else:
            return False

    def startHandle(self):
        """to be called before the program is started"""
        pass

    def stopGracefully(self):
        """Tells the runner to stop at the next convenient time"""
        if not self.stopMe:
            self.stopMe=True
            if not self.isRestarted:
                if self.controlDict:
                    # (sic) message typo preserved; controlDict backup is clobbered here
                    warning("The controlDict has already been modified. Restoring will be problementic")
                self.controlDict=ParameterFile(path.join(self.dir,"system","controlDict"),backup=True)
            self.controlDict.replaceParameter("stopAt","writeNow")
            warning("Stopping run at next write")

    def writeResults(self):
        """Writes the next possible time-step"""
        # warning("writeResult is not yet implemented")
        if not self.writeRequested:
            if not self.isRestarted:
                if self.controlDict:
                    # (sic) message typo preserved; controlDict backup is clobbered here
                    warning("The controlDict has already been modified. Restoring will be problementic")
                self.controlDict=ParameterFile(path.join(self.dir,"system","controlDict"),backup=True)
            self.controlDict.replaceParameter("writeControl","timeStep")
            self.controlDict.replaceParameter("writeInterval","1")
            self.writeRequested=True

    def stopHandle(self):
        """called after the program has stopped"""
        if self.stopMe or self.isRestarted:
            self.controlDict.restore()

    def lineHandle(self,line):
        """called every time a new line is read"""
        pass

    def logName(self):
        """Get the name of the logfiles"""
        return self.logFile

    def getSolutionDirectory(self,archive=None):
        """@return: The directory of the case
        @rtype: PyFoam.RunDictionary.SolutionDirectory
        @param archive: Name of the directory for archiving results"""
        return SolutionDirectory(self.dir,archive=archive,parallel=True)

    def addEndTrigger(self,f):
        """@param f: A function that is to be executed at the end of the simulation"""
        self.endTriggers.append(f)
def main():
    """Batch driver for the wind-field runs.

    Parses the command line, configures logging, reads the controlfile,
    then runs the wind solver once for every (wspeed, wdir) combination,
    archiving the requested result fields and, optionally, a VTK export
    of each run.
    """
    parser = OptionParser(usage=usage, version=version)
    parser.add_option("-q", "--quiet",
                      action="store_true", dest="quiet", default=False,
                      help="Only print warnings and errors")
    parser.add_option("-t", "--template",
                      action="store", dest="controlfile", default=None,
                      help="Generate default controlfile")
    parser.add_option("-l", "--logfile",
                      action="store", dest="logfile", default=None,
                      help="Writes output to logfile")
    # BUG FIX: help string was a copy-paste of the --logfile text
    parser.add_option("-d", "--debug",
                      action="store_true", dest="debug", default=False,
                      help="Print debug output")
    parser.add_option("-c", "--case",
                      action="store", dest="case", default=None,
                      help="Specifies case directory")
    (options, args) = parser.parse_args()

    # The root logger carries the level; a console handler is attached
    # only when no logfile was requested.
    rootLogger = logging.getLogger('')
    logger = logging.getLogger('canopyRunner')
    reportLevel = logging.INFO
    if options.quiet:
        reportLevel = logging.WARNING
    if options.debug:
        reportLevel = logging.DEBUG
    rootLogger.setLevel(reportLevel)

    if options.logfile is None:
        console = logging.StreamHandler()
        console.setLevel(reportLevel)
        formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        rootLogger.addHandler(console)

    if options.controlfile is not None:
        generateCf(path.abspath(options.controlfile))
        # BUG FIX: was a Python-2-only print statement
        print("Wrote default controlfile")
        sys.exit()

    if options.logfile is not None:
        logFileName = path.abspath(options.logfile)
        if not path.exists(path.dirname(logFileName)):
            # BUG FIX: was a Python-2-only print statement
            print("Bad argument, directory for logfile does not exist")
            sys.exit()
        logfile = logging.FileHandler(logFileName, "w")
        logfile.setLevel(reportLevel)
        formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
        logfile.setFormatter(formatter)
        rootLogger.addHandler(logfile)

    if len(args) != 1:
        parser.error("Incorrect number of arguments")

    cf = ControlFile.ControlFile(fileName=path.abspath(args[0]))
    if options.case is not None:
        casePath = path.abspath(options.case)
    else:
        casePath = os.getcwd()
    caseName = path.basename(casePath)
    ch = CaseHandler.CaseHandler(casePath)

    # --- settings read from the controlfile ---
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    inletProfile_z0 = cf.findScalar("z0:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    archiveDirName = cf.findString("flowArchiveDirName:", optional=False)
    restoreArchived = cf.findBoolean("restoreArchived:", optional=True, default=False)
    archiveVTK = cf.findBoolean("archiveVTK:", optional=False)
    VTKArchiveDir = cf.findExistingPath("VTKArchiveDir:", optional=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        nprocesses = 8 * nodes  # assumes 8 cores per node -- TODO confirm
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="windFoam")
    softStart = cf.findString("softstart_application:", optional=True)
    initCmds = cf.findStringList("initialize:", default=["setWindInlet"])

    flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(casePath)

    logger.info("Running windRunner.py")
    logger.info("Setup overview:")
    logger.info(25 * "-")
    logger.info("Case: " + caseName)
    logger.info(25 * "-")
    logger.info("Wind directions are: " + str(wdirs))
    logger.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    logger.info("Total number of runs: " + str(nruns))
    logger.info(25 * "-")
    logger.info("Number of iterations are: " + str(iterations))
    logger.info("Number of nodes are: " + str(nodes))
    logger.info("Fields to be archived: " + str(fieldsToArchive))
    logger.info("ArchiveToVTK is set to: " + str(archiveVTK))
    logger.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    windDict = ParameterFile(path.join(ch.constantDir(), "windDict"))
    RASDict = ParameterFile(path.join(ch.constantDir(), "RASProperties"))

    # Archived files carry a .gz suffix when the case writes compressed output
    compression = controlDict.readParameter("writeCompression")
    if compression == "compressed" or compression == "on":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
    else:
        filesToArchive = fieldsToArchive

    if not path.exists(VTKArchiveDir) and archiveVTK:
        logger.error("The VTKArchiveDir does not exist")
        sys.exit()

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)

    controlDict.replaceParameter("stopAt", "nextWrite")

    # Crude progress estimate: ~20 seconds per iteration.
    timeLeft = iterations * nruns * 20
    # BUG FIX: was the octal literal "05" (== 5, invalid syntax in Python 3);
    # value kept at 5 to preserve behaviour -- possibly intended 0, TODO confirm
    timeSpent = 5
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns

    ch.backUpInitialFields()
    logger.info("Backup made of initial fields")

    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            controlDict.replaceParameter("writeInterval", str(iterations))
            logger.info("Running calculations for dir: " + str(wdir) + " speed: " + str(wspeed))
            logger.info("Time left: " + str(timeLeft / 60.0) + "min, Time spent: " + str(timeSpent / 60.0) + "min")
            logger.info("Estimated time for finish: " + str(timeEstimated[:4]))
            logger.info("Cases finished: " + str(casesRun) + " cases left: " + str(casesLeft))
            logger.info(" ")
            ch.clearResults()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            logger.info("restoreArchived = " + str(restoreArchived))
            if restoreArchived and flowArchive.inArchive(dirName=dirName):
                # Reuse a previously computed flow field instead of recomputing
                logger.info("Restoring archived flow fields")
                flowArchive.restore(dirName, fieldsToArchive, ch.initialDir())
                for filename in fieldsToArchive:
                    flowArchive.getFile(outputFile=path.join(ch.initialDir(), filename),
                                        fileName=filename, archiveDirName=dirName)
                logger.info("Restored archived flow fields!")
            else:
                logger.info("...Modifying bc:s")
                ch.modWindDir(ch.initialDir(), wdir)
                logger.info("bc:s modified!")
                logger.info("...Setting inlet profiles")
                windDict.replaceParameter("U10", str(wspeed))
                windDict.replaceParameter("windDirection", str(wdir))
                windDict.replaceParameter("z0", str(inletProfile_z0))
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", casePath],
                                         silent=True, logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    logger.info("Successfully finished: %s" % initCmd)
                else:
                    logger.error("Error when running: %s" % initCmd)
                    sys.exit()
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(argv=[decomposeCmd, "-case", casePath],
                                                  silent=True, logname="decomposePar")
                    # BUG FIX: message read "to run onN of processors"
                    # (missing space, redundant str() around a literal)
                    logger.info("...Decomposing case to run on " + str(Lam.cpuNr()) + " processors")
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        logger.info("Case decomposed!")
                    else:
                        logger.error("Error when running decomposePar")
                        sys.exit()
                else:
                    logger.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                logger.info("Serial Run chosen!")
            if softStart is not None:
                # Warm-up with plain kEpsilon before switching to the canopy model
                RASDict.replaceParameter("RASModel", "kEpsilon")
                controlDict.replaceParameter("stopAt", "nextWrite")
                controlDict.replaceParameter("writeInterval", "50")
                logger.info("...Softstarting using " + softStart)
                windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),
                                                   argv=[softStart, "-case", casePath],
                                                   silent=True, lam=Lam, logname=softStart)
                windFoamSolver.start()
                if windFoamSolver.runOK():
                    logger.info("Iterations finished for windFoam")
                else:
                    logger.error("Error while running windFoam")
                    sys.exit()
                RASDict.replaceParameter("RASModel", "kEpsilon_canopy")
                controlDict.replaceParameter("writeInterval", str(iterations))
                # Copy the landuse and LAD-files into every processor time
                # directory after the first iterations with simpleFoam
                ch.execute("for file in " + os.path.join(casePath, "processor*/0/[Ll]*") +
                           r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done')
            logger.info("...Running solver for wind field")
            windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),
                                               argv=[solver, "-case", casePath],
                                               silent=True, lam=Lam, logname=solver)
            windFoamSolver.start()
            if windFoamSolver.runOK():
                logger.info("Iterations finished for windFoam")
            else:
                logger.error("Error while running windFoam")
                sys.exit()
            if nprocesses > 1:
                logger.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(argv=[reconstructCmd, "-latestTime", "-case", casePath],
                                                silent=True, logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    logger.info("recunstruction ready!")
                else:
                    logger.error("Error while running recontructPar")
                    sys.exit()
                logger.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(casePath, "processor*"))
                logger.info("Removed decomposed mesh!")
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Ux", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Uy", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_k", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "U", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "k", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "p", casesRun + 1)
            logger.info("Archiving results from directory: %s" % ch.latestDir())
            # save latest result files
            # (loop variable renamed: "file" shadowed the builtin)
            solFiles = [f for f in os.listdir(ch.latestDir()) if f in filesToArchive]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                flowArchive.addFile(path.join(ch.latestDir(), filename), dirName=dirName)
            if archiveVTK:
                # Creating a temporary last time directory to be used by foamToVTK.
                # NOTE(review): eval() on directory names is fragile and unsafe on
                # untrusted paths; kept because it handles both integer and float
                # time-directory names -- consider float()/int() parsing instead.
                VTKTime = str(eval(path.basename(ch.latestDir())) + 1)
                newTimeDir = path.join(casePath, VTKTime)
                os.mkdir(newTimeDir)
                for filename in solFiles:
                    oldFile = path.join(casePath, str(eval(ch.getLast()) - 1), filename)
                    ch.execute("cp " + oldFile + " " + newTimeDir + "/")
                foamToVTKUtil = UtilityRunner(argv=["foamToVTK", "-case", casePath, "-time " + VTKTime],
                                              silent=True, logname="foamToVTK")
                foamToVTKUtil.start()
                if foamToVTKUtil.runOK():
                    ch.execute("mv " + path.join(casePath, "VTK") + " " +
                               path.join(VTKArchiveDir, "VTK" + "_wspeed_" + str(wspeed) + "_wdir_" + str(wdir)))
                    ch.execute("rm -r " + path.join(casePath, VTKTime))
                    logger.info("Exported to VTK archive!")
                else:
                    logger.error("Error when exporting to VTK")
                    sys.exit()
            logger.info("Finished wdir: " + str(wdir) + " wspeed: " + str(wspeed) + "Last iter = " + ch.getLast())
            logger.info(" ")
            # progress bookkeeping for the time estimate
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)

    ch.clearResults()
    logger.info("Cleared all result directories exept: %s" % (" ".join(ch.getTimes())))
    ch.restoreInitialFields()
    logger.info("Restored initital fields from backup copy")
    # restoring windData dictionary to original state
    windDict.purgeFile()
    convTable.writeProbes()
    convTable.writeResiduals()
    logger.info("Residuals and probes from solver windFoam written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    logger.info("Finished batch calculation!")
def main():
    """Batch driver for the species-transport runs.

    For every (wspeed, wdir) combination: restores the matching flow
    field from the flow archive, runs the species solver, and archives
    the resulting concentration fields. Combinations without an
    archived flow field, or already present in the concentration
    archive, are skipped.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-t", "--template",
        action="store",
        dest="template",
        help="Generate default controlfile"
    )
    parser.add_argument(
        '-v',
        action=VerboseAction,
        dest='loglevel',
        default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l',
        metavar='logfile',
        action=LogFileAction,
        dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c", "--case",
        action="store",
        dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(
        action="store",
        dest="controlfile",
        help="Controlfile for speciesRunner"
    )
    args = parser.parse_args()

    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)

    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)

    # --- settings read from the controlfile ---
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString(
        "flowArchiveDirName:", optional=True, default='flowArchive'
    )
    concArchiveDirName = cf.findString(
        "concArchiveDirName:", optional=True, default='concArchive'
    )
    restoreArchived = cf.findBoolean(
        "restoreArchived:", optional=True, default=False
    )
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)

    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)

    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict")
    )

    # Archived files carry a .gz suffix when the case writes compressed output
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)

    controlDict.replaceParameter("stopAt", "nextWrite")

    # Crude progress estimate: ~20 seconds per iteration.
    timeLeft = iterations * nruns * 20
    # BUG FIX: was the octal literal "05" (== 5, invalid syntax in Python 3);
    # value kept at 5 to preserve behaviour -- possibly intended 0, TODO confirm
    timeSpent = 5
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns

    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")

    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # NOTE(review): skips when results already archived unless
            # restoreArchived forces a re-run -- confirm intended semantics
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info('Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..." % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info(
                "Running calculations for dir: " + str(wdir) +
                " speed: " + str(wspeed)
            )
            log.info(
                "Time left: " + str(timeLeft / 60.0) +
                "min, Time spent: " + str(timeSpent / 60.0) + "min"
            )
            log.info(
                "Estimated time for finish: " + str(timeEstimated[:4])
            )
            log.info(
                "Cases finished: " + str(casesRun) +
                " cases left: " + str(casesLeft)
            )
            log.info(" ")
            ch.clearResults()
            log.info("...Modifying bc:s")
            # Remove stale flow fields before restoring the archived ones
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            log.info("Restored archived flow fields!")
            for initCmd in initCmds:
                initUtil = UtilityRunner(
                    argv=[initCmd, "-case", args.case],
                    silent=True,
                    logname=initCmd
                )
                initUtil.start()
                if initUtil.runOK():
                    log.info("Successfully finished: %s" % initCmd)
                else:
                    log.error("Error when running: %s" % initCmd)
                    sys.exit(1)
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True,
                        logname="decomposePar"
                    )
                    log.info("Decomposing case for %i processors" % Lam.cpuNr())
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit()
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                log.info("Serial Run chosen!")
            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(),
                argv=[solver, "-case", args.case],
                silent=True,
                lam=Lam,
                logname=solver
            )
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit()
            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True,
                    logname="reconstrucPar"
                )
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("recunstruction ready!")
                else:
                    log.error("Error while running recontructPar")
                    sys.exit()
                log.info("Removing decomposed mesh")
                ch.execute(
                    "rm -r " + os.path.join(args.case, "processor*")
                )
                log.info("Removed decomposed mesh!")
            iterationsReady = (
                int(ch.getLast()) - int(path.basename(ch.initialDir()))
            )
            if iterationsReady < iterations:
                # NOTE(review): this branch only warns -- results of the
                # aborted run are still archived below; confirm whether a
                # "continue" was intended here.
                log.warning(
                    "Run was aborted before finalizing" +
                    " the wanted number of iterations"
                )
                log.warning(
                    "Guessing that nan:s were present in results. " +
                    "Removing results from current run and moving on"
                )
            log.info("Archiving results")
            # save latest concentration result files (species fields only,
            # skipping the default species and editor backup files)
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default"
                and ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(
                    path.join(ch.latestDir(), filename),
                    dirName=dirName
                )
                convTable.addResidual(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    "speciesFoam",
                    "linear_" + filename,
                    casesRun + 1
                )
                convTable.addProbes(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    filename,
                    casesRun + 1
                )
            log.info(
                "Residuals and probes from solver " +
                "speciesFoam added to convergence table"
            )
            # Publish the list of archived concentration files so that the
            # statistics utilities can find them
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList(
                "concFileList", archivedConcFiles
            )
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s" % (
                wdir, wspeed, ch.getLast())
            )
            log.info(" ")
            # progress bookkeeping for the time estimate
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)

    ch.clearResults()
    ch.restoreInitialFields()
    log.info("Restored initital fields")
    # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    log.info(
        "Residuals and probes from solver windFoam " +
        "written to case/convergence directory"
    )
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def testParameterFileRead(self):
    """A missing parameter reads as the empty string; an existing one
    returns its literal value."""
    controlDict = ParameterFile(path.join(self.dest, "system", "controlDict"))
    for parameter, expected in (("notHere", ""), ("startTime", "0")):
        self.assertEqual(controlDict.readParameter(parameter), expected)
class BasicRunner(object): """Base class for the running of commands When the command is run the output is copied to a LogFile and (optionally) standard-out The argument list assumes for the first three elements the OpenFOAM-convention: <cmd> <dir> <case> The directory name for outputs is therefor created from <dir> and <case> Provides some handle-methods that are to be overloaded for additional functionality""" def __init__(self, argv=None, silent=False, logname=None, compressLog=False, lam=None, server=False, restart=False, noLog=False, logTail=None, remark=None, jobId=None, parameters=None, writeState=True, echoCommandLine=None): """:param argv: list with the tokens that are the command line if not set the standard command line is used :param silent: if True no output is sent to stdout :param logname: name of the logfile :param compressLog: Compress the logfile into a gzip :param lam: Information about a parallel run :param server: Whether or not to start the network-server :type lam: PyFoam.Execution.ParallelExecution.LAMMachine :param noLog: Don't output a log file :param logTail: only the last lines of the log should be written :param remark: User defined remark about the job :param parameters: User defined dictionary with parameters for documentation purposes :param jobId: Job ID of the controlling system (Queueing system) :param writeState: Write the state to some files in the case :param echoCommandLine: Prefix that is printed with the command line. 
If unset nothing is printed """ if sys.version_info < (2, 3): # Python 2.2 does not have the capabilities for the Server-Thread if server: warning( "Can not start server-process because Python-Version is too old" ) server = False if argv == None: self.argv = sys.argv[1:] else: self.argv = argv if oldApp(): self.dir = path.join(self.argv[1], self.argv[2]) if self.argv[2][-1] == path.sep: self.argv[2] = self.argv[2][:-1] else: self.dir = path.curdir if "-case" in self.argv: self.dir = self.argv[self.argv.index("-case") + 1] logname = calcLogname(logname, argv) try: sol = self.getSolutionDirectory() except OSError: e = sys.exc_info()[1] # compatible with 2.x and 3.x error("Solution directory", self.dir, "does not exist. No use running. Problem:", e) self.echoCommandLine = echoCommandLine self.silent = silent self.lam = lam self.origArgv = self.argv self.writeState = writeState self.__lastLastSeenWrite = 0 self.__lastNowTimeWrite = 0 if self.lam != None: self.argv = lam.buildMPIrun(self.argv) if config().getdebug("ParallelExecution"): debug("Command line:", " ".join(self.argv)) self.cmd = " ".join(self.argv) foamLogger().info("Starting: " + self.cmd + " in " + path.abspath(path.curdir)) self.logFile = path.join(self.dir, logname + ".logfile") isRestart, restartnr, restartName, lastlog = findRestartFiles( self.logFile, sol) if restartName: self.logFile = restartName if not isRestart: from os import unlink from glob import glob for g in glob(self.logFile + ".restart*"): if path.isdir(g): rmtree(g) else: unlink(g) self.noLog = noLog self.logTail = logTail if self.logTail: if self.noLog: warning("Log tail", self.logTail, "and no-log specified. 
Using logTail") self.noLog = True self.lastLines = [] self.compressLog = compressLog if self.compressLog: self.logFile += ".gz" self.fatalError = False self.fatalFPE = False self.fatalStackdump = False self.endSeen = False self.warnings = 0 self.started = False self.isRestarted = False if restart: self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"), backup=True) self.controlDict.replaceParameter("startFrom", "latestTime") self.isRestarted = True else: self.controlDict = None self.run = FoamThread(self.cmd, self) self.server = None if server: self.server = FoamServer(run=self.run, master=self) self.server.setDaemon(True) self.server.start() try: IP, PID, Port = self.server.info() f = open(path.join(self.dir, "PyFoamServer.info"), "w") print_(IP, PID, Port, file=f) f.close() except AttributeError: warning( "There seems to be a problem with starting the server:", self.server, "with attributes", dir(self.server)) self.server = None self.createTime = None self.nowTime = None self.startTimestamp = time() self.stopMe = False self.writeRequested = False self.endTriggers = [] self.lastLogLineSeen = None self.lastTimeStepSeen = None self.remark = remark self.jobId = jobId self.data = {"lines": 0} # self.data={"lines":0L} self.data["logfile"] = self.logFile self.data["casefullname"] = path.abspath(self.dir) self.data["casename"] = path.basename(path.abspath(self.dir)) self.data["solver"] = path.basename(self.argv[0]) self.data["solverFull"] = self.argv[0] self.data["commandLine"] = self.cmd self.data["hostname"] = uname()[1] if remark: self.data["remark"] = remark else: self.data["remark"] = "No remark given" if jobId: self.data["jobId"] = jobId parameterFile = sol.getParametersFromFile() if len(parameterFile): self.data["parameters"] = {} for k, v in parameterFile.items(): self.data["parameters"][k] = makePrimitiveString(v) if parameters: if "parameters" not in self.data: self.data["parameters"] = {} self.data["parameters"].update(parameters) 
self.data["starttime"] = asctime() def appendTailLine(self, line): """Append lines to the tail of the log""" if len(self.lastLines) > 10 * self.logTail: # truncate the lines, but not too often self.lastLines = self.lastLines[-self.logTail:] self.writeTailLog() self.lastLines.append(line + "\n") def writeTailLog(self): """Write the last lines to the log""" fh = open(self.logFile, "w") if len(self.lastLines) <= self.logTail: fh.writelines(self.lastLines) else: fh.writelines(self.lastLines[-self.logTail:]) fh.close() def start(self): """starts the command and stays with it till the end""" self.started = True if not self.noLog: if self.compressLog: fh = gzip.open(self.logFile, "w") else: fh = open(self.logFile, "w") self.startHandle() self.writeStartTime() self.writeTheState("Running") check = BasicRunnerCheck() if self.echoCommandLine: print_(self.echoCommandLine + " " + " ".join(self.argv)) self.run.start() interrupted = False totalWarningLines = 0 addLinesToWarning = 0 collectWarnings = True while self.run.check(): try: self.run.read() if not self.run.check(): break line = self.run.getLine() if "errorText" in self.data: self.data["errorText"] += line + "\n" if addLinesToWarning > 0: self.data["warningText"] += line + "\n" addLinesToWarning -= 1 totalWarningLines += 1 if totalWarningLines > 500: collectWarnings = False addLinesToWarning = 0 self.data[ "warningText"] += "No more warnings added because limit of 500 lines exceeded" self.data["lines"] += 1 self.lastLogLineSeen = time() self.writeLastSeen() tmp = check.getTime(line) if check.controlDictRead(line): if self.writeRequested: duration = config().getfloat("Execution", "controlDictRestoreWait", default=30.) 
warning( "Preparing to reset controlDict to old glory in", duration, "seconds") Timer(duration, restoreControlDict, args=[self.controlDict, self]).start() self.writeRequested = False if tmp != None: self.data["time"] = tmp self.nowTime = tmp self.writeTheState("Running", always=False) self.writeNowTime() self.lastTimeStepSeen = time() if self.createTime == None: # necessary because interFoam reports no creation time self.createTime = tmp try: self.data["stepNr"] += 1 except KeyError: self.data["stepNr"] = 1 # =1L self.data["lasttimesteptime"] = asctime() tmp = check.getCreateTime(line) self.endSeen = check.endSeen if tmp != None: self.createTime = tmp if not self.silent: try: print_(line) except IOError: e = sys.exc_info()[1] # compatible with 2.x and 3.x if e.errno != 32: raise e else: # Pipe was broken self.run.interrupt() if line.find("FOAM FATAL ERROR") >= 0 or line.find( "FOAM FATAL IO ERROR") >= 0: self.fatalError = True self.data["errorText"] = "PyFoam found a Fatal Error " if "time" in self.data: self.data["errorText"] += "at time " + str( self.data["time"]) + "\n" else: self.data["errorText"] += "before time started\n" self.data["errorText"] += "\n" + line + "\n" if line.find("Foam::sigFpe::sigFpeHandler") >= 0: self.fatalFPE = True if line.find("Foam::error::printStack") >= 0: self.fatalStackdump = True if self.fatalError and line != "": foamLogger().error(line) if line.find("FOAM Warning") >= 0: self.warnings += 1 try: self.data["warnings"] += 1 except KeyError: self.data["warnings"] = 1 if collectWarnings: addLinesToWarning = 20 if not "warningText" in self.data: self.data["warningText"] = "" else: self.data["warningText"] += ("-" * 40) + "\n" self.data[ "warningText"] += "Warning found by PyFoam on line " self.data["warningText"] += str( self.data["lines"]) + " " if "time" in self.data: self.data["warningText"] += "at time " + str( self.data["time"]) + "\n" else: self.data["warningText"] += "before time started\n" self.data["warningText"] += "\n" + 
line + "\n" if self.server != None: self.server._insertLine(line) self.lineHandle(line) if not self.noLog: fh.write(line + "\n") fh.flush() elif self.logTail: self.appendTailLine(line) except KeyboardInterrupt: e = sys.exc_info()[1] # compatible with 2.x and 3.x foamLogger().warning("Keyboard Interrupt") self.run.interrupt() self.writeTheState("Interrupted") self.data["keyboardInterrupt"] = True interrupted = True if not "keyboardInterrupt" in self.data: self.data["keyboardInterrupt"] = self.run.keyboardInterupted self.data["interrupted"] = interrupted self.data["OK"] = self.runOK() self.data["endSeen"] = self.endSeen self.data["cpuTime"] = self.run.cpuTime() self.data["cpuUserTime"] = self.run.cpuUserTime() self.data["cpuSystemTime"] = self.run.cpuSystemTime() self.data["wallTime"] = self.run.wallTime() self.data["usedMemory"] = self.run.usedMemory() self.data["endtime"] = asctime() self.data["fatalError"] = self.fatalError self.data["fatalFPE"] = self.fatalFPE self.data["fatalStackdump"] = self.fatalStackdump self.writeNowTime(force=True) self.stopHandle() if not interrupted: if self.endSeen: self.writeTheState("Finished - Ended") else: self.writeTheState("Finished") for t in self.endTriggers: t() if not self.noLog: fh.close() elif self.logTail: self.writeTailLog() if self.server != None: self.server.deregister() self.server.kill() foamLogger().info("Finished") return self.data def writeToStateFile(self, fName, message): """Write a message to a state file""" if self.writeState: open(path.join(self.dir, "PyFoamState." 
+ fName), "w").write(message + "\n")

    def writeStartTime(self):
        """Write the real (wall-clock) time the run was started at"""
        self.writeToStateFile("StartedAt", asctime())

    def writeTheState(self, state, always=True):
        """Write the current state the run is in

        :param state: name of the state
        :param always: if False the state is only written when the last
            state-write happened more than 9 seconds ago (throttling)"""
        if always or (time() - self.__lastLastSeenWrite) > 9:
            self.writeToStateFile("TheState", state)

    def writeLastSeen(self):
        """Record when output was last seen (at most every 10 seconds)"""
        if (time() - self.__lastLastSeenWrite) > 10:
            self.writeToStateFile("LastOutputSeen", asctime())
            self.__lastLastSeenWrite = time()

    def writeNowTime(self, force=False):
        """Record the current simulation time (at most every 10 seconds)

        :param force: write unconditionally, ignoring the throttling"""
        if (time() - self.__lastNowTimeWrite) > 10 or force:
            self.writeToStateFile("CurrentTime", str(self.nowTime))
            self.__lastNowTimeWrite = time()

    def runOK(self):
        """checks whether the run was successful"""
        if self.started:
            # successful = none of the fatal conditions was seen in the output
            return not self.fatalError and not self.fatalFPE and not self.fatalStackdump # and self.run.getReturnCode()==0
        else:
            return False

    def startHandle(self):
        """to be called before the program is started"""
        pass

    def _writeStopAt(self, value, message):
        """Write stopAt to stop the run gracefully

        :param value: value that is written into the stopAt-entry of the controlDict
        :param message: warning message printed for the user"""
        if not self.stopMe:
            self.stopMe = True
            if not self.isRestarted:
                # only create a backed-up ParameterFile if this instance has
                # not already modified the controlDict (restart rewrote startFrom)
                if self.controlDict:
                    warning("The controlDict has already been modified. Restoring will be problementic")
                self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"), backup=True)
            self.controlDict.replaceParameter("stopAt", value)
            warning(message)

    def stopGracefully(self):
        """Tells the runner to stop at the next convenient time"""
        self._writeStopAt("writeNow", "Stopping run and writting")

    def stopAtNextWrite(self):
        """Tells the runner to stop at the next write"""
        self._writeStopAt("nextWrite", "Stopping run at next write")

    def stopWithoutWrite(self):
        """Tells the runner to stop without writing"""
        self._writeStopAt("noWriteNow", "Stopping run without writing")

    def writeResults(self):
        """Writes the next possible time-step"""
        # warning("writeResult is not yet implemented")
        if not self.writeRequested:
            if not self.isRestarted:
                if self.controlDict:
                    warning("The controlDict has already been modified. Restoring will be problementic")
                self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"), backup=True)
            # force a write at the very next time-step
            self.controlDict.replaceParameter("writeControl", "timeStep")
            self.controlDict.replaceParameter("writeInterval", "1")
            self.writeRequested = True

    def stopHandle(self):
        """called after the program has stopped"""
        if self.stopMe or self.isRestarted:
            # undo the modifications that were made to the controlDict
            self.controlDict.restore()

    def lineHandle(self, line):
        """called every time a new line is read"""
        pass

    def logName(self):
        """Get the name of the logfiles"""
        return self.logFile

    def getSolutionDirectory(self, archive=None):
        """:return: The directory of the case
        :rtype: PyFoam.RunDictionary.SolutionDirectory
        :param archive: Name of the directory for archiving results"""
        return SolutionDirectory(self.dir, archive=archive, parallel=True)

    def addEndTrigger(self, f):
        """:param f: A function that is to be executed at the end of the simulation"""
        self.endTriggers.append(f)
from PyFoam.RunDictionary.ParameterFile import ParameterFile
import sys

# Small command-line utility (Python 2): replace a parameter in an
# OpenFOAM dictionary file, then restore the original content.
#   argv[1]: dictionary file to manipulate
#   argv[2]: name of the parameter
#   argv[3]: new value for the parameter
file = sys.argv[1]
name = sys.argv[2]
neu = sys.argv[3]

para = ParameterFile(file)

print "Old value", para.readParameter(name)
para.replaceParameter(name, neu)
print "new value", para.readParameter(name)
# purgeFile reverts the file to its state before the replacement
para.purgeFile()
print "reset value", para.readParameter(name)
def __init__(self, name, backup=False):
    """Set up a logger for this object and initialize the base class.

    :param name: the parameter file (passed on to ParameterFile)
    :param backup: whether a backup copy should be kept (passed on to ParameterFile)"""
    self.logger = logging.getLogger('ExtendedParameterFile')
    ParameterFile.__init__(self, name, backup)
def run(self): config=ConfigParser.ConfigParser() files=self.parser.getArgs() good=config.read(files) # will work with 2.4 # if len(good)!=len(files): # print "Problem while trying to parse files",files # print "Only ",good," could be parsed" # sys.exit(-1) benchName=config.get("General","name") if self.opts.nameAddition!=None: benchName+="_"+self.opts.nameAddition if self.opts.foamVersion!=None: benchName+="_v"+self.opts.foamVersion isParallel=config.getboolean("General","parallel") lam=None if isParallel: nrCpus=config.getint("General","nProcs") machineFile=config.get("General","machines") if not path.exists(machineFile): self.error("Machine file ",machineFile,"needed for parallel run") lam=LAMMachine(machineFile,nr=nrCpus) if lam.cpuNr()>nrCpus: self.error("Wrong number of CPUs: ",lam.cpuNr()) print "Running parallel on",lam.cpuNr(),"CPUs" if config.has_option("General","casesDirectory"): casesDirectory=path.expanduser(config.get("General","casesDirectory")) else: casesDirectory=foamTutorials() if not path.exists(casesDirectory): self.error("Directory",casesDirectory,"needed with the benchmark cases is missing") else: print "Using cases from directory",casesDirectory benchCases=[] config.remove_section("General") for sec in config.sections(): print "Reading: ",sec skipIt=False skipReason="" if config.has_option(sec,"skip"): skipIt=config.getboolean(sec,"skip") skipReason="Switched off in file" if self.opts.excases!=None and not skipIt: for p in self.opts.excases: if fnmatch(sec,p): skipIt=True skipReason="Switched off by pattern '"+p+"'" if self.opts.cases!=None: for p in self.opts.cases: if fnmatch(sec,p): skipIt=False skipReason="" if skipIt: print "Skipping case ..... 
Reason:"+skipReason continue sol=config.get(sec,"solver") cas=config.get(sec,"case") pre=eval(config.get(sec,"prepare")) preCon=[] if config.has_option(sec,"preControlDict"): preCon=eval(config.get(sec,"preControlDict")) con=eval(config.get(sec,"controlDict")) bas=config.getfloat(sec,"baseline") wei=config.getfloat(sec,"weight") add=[] if config.has_option(sec,"additional"): add=eval(config.get(sec,"additional")) print "Adding: ", add util=[] if config.has_option(sec,"utilities"): util=eval(config.get(sec,"utilities")) print "Utilities: ", util nr=99999 if config.has_option(sec,"nr"): nr=eval(config.get(sec,"nr")) sp=None if config.has_option(sec,"blockSplit"): sp=eval(config.get(sec,"blockSplit")) toRm=[] if config.has_option(sec,"filesToRemove"): toRm=eval(config.get(sec,"filesToRemove")) setInit=[] if config.has_option(sec,"setInitial"): setInit=eval(config.get(sec,"setInitial")) parallelOK=False if config.has_option(sec,"parallelOK"): parallelOK=config.getboolean(sec,"parallelOK") deMet=["metis"] if config.has_option(sec,"decomposition"): deMet=config.get(sec,"decomposition").split() if deMet[0]=="metis": pass elif deMet[0]=="simple": if len(deMet)<2: deMet.append(0) else: deMet[1]=int(deMet[1]) else: print "Unimplemented decomposition method",deMet[0],"switching to metis" deMet=["metis"] if isParallel==False or parallelOK==True: if path.exists(path.join(casesDirectory,sol,cas)): benchCases.append( (nr,sec,sol,cas,pre,con,preCon,bas,wei,add,util,sp,toRm,setInit,deMet) ) else: print "Skipping",sec,"because directory",path.join(casesDirectory,sol,cas),"could not be found" else: print "Skipping",sec,"because not parallel" benchCases.sort() parallelString="" if isParallel: parallelString=".cpus="+str(nrCpus) resultFile=open("Benchmark."+benchName+"."+uname()[1]+parallelString+".results","w") totalSpeedup=0 minSpeedup=None maxSpeedup=None totalWeight =0 runsOK=0 currentEstimate = 1. 
print "\nStart Benching\n" csv=CSVCollection("Benchmark."+benchName+"."+uname()[1]+parallelString+".csv") # csvHeaders=["description","solver","case","caseDir","base", # "benchmark","machine","arch","cpus","os","version", # "wallclocktime","cputime","cputimeuser","cputimesystem","maxmemory","cpuusage","speedup"] for nr,description,solver,case,prepare,control,preControl,base,weight,additional,utilities,split,toRemove,setInit,decomposition in benchCases: # control.append( ("endTime",-2000) ) print "Running Benchmark: ",description print "Solver: ",solver print "Case: ",case caseName=solver+"_"+case+"_"+benchName+"."+uname()[1]+".case" print "Short name: ",caseName caseDir=caseName+".runDir" csv["description"]=description csv["solver"]=solver csv["case"]=case csv["caseDir"]=caseDir csv["base"]=base csv["benchmark"]=benchName csv["machine"]=uname()[1] csv["arch"]=uname()[4] if lam==None: csv["cpus"]=1 else: csv["cpus"]=lam.cpuNr() csv["os"]=uname()[0] csv["version"]=uname()[2] workDir=path.realpath(path.curdir) orig=SolutionDirectory(path.join(casesDirectory,solver,case), archive=None, paraviewLink=False) for a in additional+utilities: orig.addToClone(a) orig.cloneCase(path.join(workDir,caseDir)) if oldApp(): argv=[solver,workDir,caseDir] else: argv=[solver,"-case",path.join(workDir,caseDir)] run=BasicRunner(silent=True,argv=argv,logname="BenchRunning",lam=lam) runDir=run.getSolutionDirectory() controlFile=ParameterFile(runDir.controlDict()) for name,value in preControl: print "Setting parameter",name,"to",value,"in controlDict" controlFile.replaceParameter(name,value) for rm in toRemove: fn=path.join(caseDir,rm) print "Removing file",fn remove(fn) for field,bc,val in setInit: print "Setting",field,"on",bc,"to",val SolutionFile(runDir.initialDir(),field).replaceBoundary(bc,val) oldDeltaT=controlFile.replaceParameter("deltaT",0) for u in utilities: print "Building utility ",u execute("wmake 2>&1 >%s %s" % (path.join(caseDir,"BenchCompile."+u),path.join(caseDir,u))) 
print "Preparing the case: " if lam!=None: prepare=prepare+[("decomposePar","")] if decomposition[0]=="metis": lam.writeMetis(SolutionDirectory(path.join(workDir,caseDir))) elif decomposition[0]=="simple": lam.writeSimple(SolutionDirectory(path.join(workDir,caseDir)),decomposition[1]) if split: print "Splitting the mesh:",split bm=BlockMesh(runDir.blockMesh()) bm.refineMesh(split) for pre,post in prepare: print "Doing ",pre," ...." post=post.replace("%case%",caseDir) if oldApp(): args=string.split("%s %s %s %s" % (pre,workDir,caseDir,post)) else: args=string.split("%s -case %s %s" % (pre,path.join(workDir,caseDir),post)) util=BasicRunner(silent=True,argv=args,logname="BenchPrepare_"+pre) util.start() controlFile.replaceParameter("deltaT",oldDeltaT) # control.append(("endTime",-1000)) for name,value in control: print "Setting parameter",name,"to",value,"in controlDict" controlFile.replaceParameter(name,value) print "Starting at ",asctime(localtime(time())) print " Baseline is %f, estimated speedup %f -> estimated end at %s " % (base,currentEstimate,asctime(localtime(time()+base/currentEstimate))) print "Running the case ...." run.start() speedup=None cpuUsage=0 speedupOut=-1 try: speedup=base/run.run.wallTime() cpuUsage=100.*run.run.cpuTime()/run.run.wallTime() except ZeroDivisionError: print "Division by Zero: ",run.run.wallTime() if not run.runOK(): print "\nWARNING!!!!" print "Run had a problem, not using the results. 
Check the log\n" speedup=None if speedup!=None: speedupOut=speedup totalSpeedup+=speedup*weight totalWeight +=weight runsOK+=1 if maxSpeedup==None: maxSpeedup=speedup elif speedup>maxSpeedup: maxSpeedup=speedup if minSpeedup==None: minSpeedup=speedup elif speedup<minSpeedup: minSpeedup=speedup print "Wall clock: ",run.run.wallTime() print "Speedup: ",speedup," (Baseline: ",base,")" print "CPU Time: ",run.run.cpuTime() print "CPU Time User: "******"CPU Time System: ",run.run.cpuSystemTime() print "Memory: ",run.run.usedMemory() print "CPU Usage: %6.2f%%" % (cpuUsage) csv["wallclocktime"]=run.run.wallTime() csv["cputime"]=run.run.cpuTime() csv["cputimeuser"]=run.run.cpuUserTime() csv["cputimesystem"]=run.run.cpuSystemTime() csv["maxmemory"]=run.run.usedMemory() csv["cpuusage"]=cpuUsage if speedup!=None: csv["speedup"]=speedup else: csv["speedup"]="##" csv.write() resultFile.write("Case %s WallTime %g CPUTime %g UserTime %g SystemTime %g Memory %g MB Speedup %g\n" %(caseName,run.run.wallTime(),run.run.cpuTime(),run.run.cpuUserTime(),run.run.cpuSystemTime(),run.run.usedMemory(),speedupOut)) resultFile.flush() if speedup!=None: currentEstimate=totalSpeedup/totalWeight if self.opts.removeCases: print "Clearing case", if speedup==None: print "not ... because it failed" else: print "completely" rmtree(caseDir,ignore_errors=True) print print if lam!=None: lam.stop() print "Total Speedup: ",currentEstimate," ( ",totalSpeedup," / ",totalWeight, " ) Range: [",minSpeedup,",",maxSpeedup,"]" print runsOK,"of",len(benchCases),"ran OK" resultFile.write("Total Speedup: %g\n" % (currentEstimate)) if minSpeedup and maxSpeedup: resultFile.write("Range: [ %g , %g ]\n" % (minSpeedup,maxSpeedup)) resultFile.close()
def testParameterReadWithTab(self):
    """Leading blanks and tabs written into a value must not appear when it is read back."""
    parameter_file = ParameterFile(path.join(self.dest, "system", "controlDict"))
    for padded_value in (" 42", "\t 42"):
        parameter_file.replaceParameter("startTime", padded_value)
        self.assertEqual(parameter_file.readParameter("startTime"), "42")
def __init__(self,
             argv=None,
             silent=False,
             logname=None,
             compressLog=False,
             lam=None,
             server=False,
             restart=False,
             noLog=False,
             logTail=None,
             remark=None,
             jobId=None,
             parameters=None,
             writeState=True,
             echoCommandLine=None):
    """:param argv: list with the tokens that are the command line if not set the standard command line is used
    :param silent: if True no output is sent to stdout
    :param logname: name of the logfile
    :param compressLog: Compress the logfile into a gzip
    :param lam: Information about a parallel run
    :param server: Whether or not to start the network-server
    :type lam: PyFoam.Execution.ParallelExecution.LAMMachine
    :param noLog: Don't output a log file
    :param logTail: only the last lines of the log should be written
    :param remark: User defined remark about the job
    :param parameters: User defined dictionary with parameters for documentation purposes
    :param jobId: Job ID of the controlling system (Queueing system)
    :param writeState: Write the state to some files in the case
    :param echoCommandLine: Prefix that is printed with the command line. If unset nothing is printed
    """
    if sys.version_info < (2, 3):
        # Python 2.2 does not have the capabilities for the Server-Thread
        if server:
            warning("Can not start server-process because Python-Version is too old")
            server = False
    # fall back to the process command line if no argv was given
    if argv == None:
        self.argv = sys.argv[1:]
    else:
        self.argv = argv
    # determine the case directory: old-style solvers get it as two
    # positional arguments, new-style via the -case option (default: cwd)
    if oldApp():
        self.dir = path.join(self.argv[1], self.argv[2])
        if self.argv[2][-1] == path.sep:
            self.argv[2] = self.argv[2][:-1]
    else:
        self.dir = path.curdir
        if "-case" in self.argv:
            self.dir = self.argv[self.argv.index("-case") + 1]
    logname = calcLogname(logname, argv)
    try:
        sol = self.getSolutionDirectory()
    except OSError:
        e = sys.exc_info()[1]  # compatible with 2.x and 3.x
        error("Solution directory", self.dir, "does not exist. No use running. Problem:", e)
    self.echoCommandLine = echoCommandLine
    self.silent = silent
    self.lam = lam
    self.origArgv = self.argv
    self.writeState = writeState
    # timestamps of the last throttled state-file writes
    self.__lastLastSeenWrite = 0
    self.__lastNowTimeWrite = 0
    # wrap the command line in an mpirun-invocation for parallel runs
    if self.lam != None:
        self.argv = lam.buildMPIrun(self.argv)
        if config().getdebug("ParallelExecution"):
            debug("Command line:", " ".join(self.argv))
    self.cmd = " ".join(self.argv)
    foamLogger().info("Starting: " + self.cmd + " in " + path.abspath(path.curdir))
    self.logFile = path.join(self.dir, logname + ".logfile")
    isRestart, restartnr, restartName, lastlog = findRestartFiles(self.logFile, sol)
    if restartName:
        self.logFile = restartName
    # a fresh (non-restarted) run removes stale restart logfiles
    if not isRestart:
        from os import unlink
        from glob import glob
        for g in glob(self.logFile + ".restart*"):
            if path.isdir(g):
                rmtree(g)
            else:
                unlink(g)
    self.noLog = noLog
    self.logTail = logTail
    if self.logTail:
        # logTail implies that no full log is written
        if self.noLog:
            warning("Log tail", self.logTail, "and no-log specified. Using logTail")
        self.noLog = True
        self.lastLines = []
    self.compressLog = compressLog
    if self.compressLog:
        self.logFile += ".gz"
    # flags describing the outcome of the run; updated while parsing output
    self.fatalError = False
    self.fatalFPE = False
    self.fatalStackdump = False
    self.endSeen = False
    self.warnings = 0
    self.started = False
    self.isRestarted = False
    if restart:
        # modify the controlDict (with backup) so the solver continues
        # from the latest written time
        self.controlDict = ParameterFile(path.join(self.dir, "system", "controlDict"), backup=True)
        self.controlDict.replaceParameter("startFrom", "latestTime")
        self.isRestarted = True
    else:
        self.controlDict = None
    self.run = FoamThread(self.cmd, self)
    self.server = None
    if server:
        self.server = FoamServer(run=self.run, master=self)
        self.server.setDaemon(True)
        self.server.start()
        try:
            IP, PID, Port = self.server.info()
            # record the contact information of the server in the case
            f = open(path.join(self.dir, "PyFoamServer.info"), "w")
            print_(IP, PID, Port, file=f)
            f.close()
        except AttributeError:
            warning("There seems to be a problem with starting the server:", self.server, "with attributes", dir(self.server))
            self.server = None
    self.createTime = None
    self.nowTime = None
    self.startTimestamp = time()
    self.stopMe = False
    self.writeRequested = False
    self.endTriggers = []
    self.lastLogLineSeen = None
    self.lastTimeStepSeen = None
    self.remark = remark
    self.jobId = jobId
    # 'data' collects everything that is reported about the run
    self.data = {"lines": 0}
    # self.data={"lines":0L}
    self.data["logfile"] = self.logFile
    self.data["casefullname"] = path.abspath(self.dir)
    self.data["casename"] = path.basename(path.abspath(self.dir))
    self.data["solver"] = path.basename(self.argv[0])
    self.data["solverFull"] = self.argv[0]
    self.data["commandLine"] = self.cmd
    self.data["hostname"] = uname()[1]
    if remark:
        self.data["remark"] = remark
    else:
        self.data["remark"] = "No remark given"
    if jobId:
        self.data["jobId"] = jobId
    # parameters read from the case file take lower precedence than the
    # ones passed in explicitly (dict.update below overwrites)
    parameterFile = sol.getParametersFromFile()
    if len(parameterFile):
        self.data["parameters"] = {}
        for k, v in parameterFile.items():
            self.data["parameters"][k] = makePrimitiveString(v)
    if parameters:
        if "parameters" not in self.data:
            self.data["parameters"] = {}
        self.data["parameters"].update(parameters)
    self.data["starttime"] = asctime()
from PyFoam.RunDictionary.ParameterFile import ParameterFile
import sys

# Command-line utility (Python 2): replace a parameter in an OpenFOAM
# dictionary file and then restore the original content.
#   argv[1]: dictionary file to manipulate
#   argv[2]: name of the parameter
#   argv[3]: new value for the parameter
file = sys.argv[1]
name = sys.argv[2]
neu = sys.argv[3]

para = ParameterFile(file)

print "Old value", para.readParameter(name)
para.replaceParameter(name, neu)
print "new value", para.readParameter(name)
# purgeFile reverts the file to its state before the replacement
para.purgeFile()
print "reset value", para.readParameter(name)
def main():
    """Batch driver: run the species solver for every combination of
    wind speed and wind direction given in the control file, restoring
    archived flow fields for each case and archiving the resulting
    concentration fields.

    Fixes in this revision:
      * 'timeSpent = 05' used an octal-style literal that is a syntax
        error in Python 3 - replaced by 0 (a fresh time accumulator)
      * error paths called sys.exit() without an argument, which exits
        with status 0 - they now exit with status 1
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-t",
                        "--template",
                        action="store",
                        dest="template",
                        help="Generate default controlfile")
    parser.add_argument(
        '-v',
        action=VerboseAction,
        dest='loglevel',
        default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l',
        metavar='logfile',
        action=LogFileAction,
        dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c",
        "--case",
        action="store",
        dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(action="store",
                        dest="controlfile",
                        help="Controlfile for speciesRunner")
    args = parser.parse_args()

    # only generate a template controlfile and quit
    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)

    # read the batch configuration
    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString("flowArchiveDirName:",
                                       optional=True,
                                       default='flowArchive')
    concArchiveDirName = cf.findString("concArchiveDirName:",
                                       optional=True,
                                       default='concArchive')
    restoreArchived = cf.findBoolean("restoreArchived:",
                                     optional=True,
                                     default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)
    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)

    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict"))
    # honour writeCompression: archived files then carry a .gz suffix
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)

    controlDict.replaceParameter("stopAt", "nextWrite")

    # rough time book-keeping for the progress estimates
    timeLeft = iterations * nruns * 20
    timeSpent = 0  # was the invalid literal 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns

    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")

    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # skip combinations whose results are already archived
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info(
                    'Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            # a stored flow solution is a precondition for the species run
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..."
                            % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue

            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info("Running calculations for dir: " + str(wdir) +
                     " speed: " + str(wspeed))
            log.info("Time left: " + str(timeLeft / 60.0) +
                     "min, Time spent: " + str(timeSpent / 60.0) + "min")
            log.info("Estimated time for finish: " + str(timeEstimated[:4]))
            log.info("Cases finished: " + str(casesRun) +
                     " cases left: " + str(casesLeft))
            log.info(" ")
            ch.clearResults()
            log.info("...Modifying bc:s")
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            # for filename in flowFiles:
            #     flowArchive.getFile(
            #         outputFile=path.join(ch.initialDir(), filename),
            #         fileName=filename, archiveDirName=dirName
            #     )
            log.info("Restored archived flow fields!")

            # user-configured initialization utilities
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", args.case],
                                         silent=True,
                                         logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    log.info("Successfully finished: %s" % initCmd)
                else:
                    log.error("Error when running: %s" % initCmd)
                    sys.exit(1)

            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True,
                        logname="decomposePar")
                    log.info("Decomposing case for %i processors" % Lam.cpuNr())
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit(1)  # was sys.exit() (status 0)
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit(1)  # was sys.exit() (status 0)
            else:
                Lam = None
                log.info("Serial Run chosen!")

            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(StandardLogAnalyzer(),
                                           argv=[solver, "-case", args.case],
                                           silent=True,
                                           lam=Lam,
                                           logname=solver)
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit(1)  # was sys.exit() (status 0)

            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True,
                    logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("recunstruction ready!")
                else:
                    log.error("Error while running recontructPar")
                    sys.exit(1)  # was sys.exit() (status 0)
                log.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(args.case, "processor*"))
                log.info("Removed decomposed mesh!")

            iterationsReady = (int(ch.getLast()) -
                               int(path.basename(ch.initialDir())))
            if iterationsReady < iterations:
                log.warning("Run was aborted before finalizing" +
                            " the wanted number of iterations")
                log.warning("Guessing that nan:s were present in results. " +
                            "Removing results from current run and moving on")
            log.info("Archiving results")
            # save latest concentration result files
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default"
                and ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
                convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                      "speciesFoam",
                                      "linear_" + filename,
                                      casesRun + 1)
                convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                    filename,
                                    casesRun + 1)
            log.info("Residuals and probes from solver " +
                     "speciesFoam added to convergence table")
            # Adding the list of names of archived concentration
            # files to the statisticsDict dictionary
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList("concFileList",
                                                archivedConcFiles)
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s" %
                     (wdir, wspeed, ch.getLast()))
            log.info(" ")
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)
            ch.clearResults()
            ch.restoreInitialFields()
            log.info("Restored initital fields")

    # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    log.info("Residuals and probes from solver windFoam " +
             "written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def testParameterFileRead(self):
    """A missing parameter reads as the empty string; an existing one reads as its value."""
    control_dict_path = path.join(self.dest, "system", "controlDict")
    parameter_file = ParameterFile(control_dict_path)
    for key, expected in (("notHere", ""), ("startTime", "0")):
        self.assertEqual(parameter_file.readParameter(key), expected)
def __init__(self, name, backup=False):
    """Set up a logger for this object and initialize the base class.

    :param name: the parameter file (passed on to ParameterFile)
    :param backup: whether a backup copy should be kept (passed on to ParameterFile)"""
    self.logger = logging.getLogger('ExtendedParameterFile')
    ParameterFile.__init__(self, name, backup)
def run(self):
    """Run the solver on the case and restart it automatically until the
    endTime from the controlDict is reached, no progress is made anymore,
    or the maximum number of restarts is exceeded."""
    casePath = self.parser.casePath()
    self.checkCase(casePath)
    # self.addLocalConfig(casePath)
    self.addToCaseLog(casePath, "Starting")
    self.prepareHooks()
    self.processPlotLineOptions(autoPath=casePath)
    lam = self.getParallel(SolutionDirectory(casePath, archive=None))
    isParallel = lam is not None
    # time-name of the last written time-directory; used to detect
    # restarts that make no progress
    self.lastWrittenTime = None
    sol = SolutionDirectory(casePath, archive=None, parallel=isParallel)
    ctrlDict = ParameterFile(sol.controlDict(), backup=False)
    # restarting only works when the solver continues from the latest time
    if ctrlDict.readParameter("startFrom") != "latestTime":
        self.error(
            "In", casePath,
            "the value of 'startFrom' is not 'latestTime' (required for this script)"
        )
    args = self.replaceAutoInArgs(self.parser.getArgs())

    def checkRestart(data=None):
        """Decide whether another restart is needed.

        :param data: the data-dictionary of the previous run (if any)
        :return: None if a restart should be attempted, otherwise a
            string with the reason for stopping"""
        lastTimeName = sol.getLast()
        lastTime = float(lastTimeName)
        # re-read the controlDict: endTime may have been edited between runs
        ctrlDict = ParameterFile(sol.controlDict(), backup=False)
        endTime = float(ctrlDict.readParameter("endTime"))
        if abs(endTime - lastTime) / endTime < 1e-5:
            return "Reached endTime {}".format(endTime)
        logfile = calcLogname(self.opts.logname, args)
        isRestart, restartnr, restartName, lastlog = findRestartFiles(
            logfile, sol)
        # TODO: look into the logfile
        if self.lastWrittenTime is not None:
            # no new time-directory since the previous restart
            if self.lastWrittenTime == lastTimeName:
                return "Last restart didn't improve on {}. Further restarts make no sense".format(
                    lastTime)
        self.lastWrittenTime = lastTimeName
        if data:
            if "stepNr" in data and data["stepNr"] < self.opts.minimumSteps:
                return "Only {} steps done while {} are required".format(
                    data["stepNr"], self.opts.minimumSteps)

    redo = True
    reason = checkRestart()
    if reason is not None:
        self.warning("Not starting:", reason)
        redo = False
    self.checkAndCommit(sol)
    self.initBlink()
    startNr = 0
    self.setLogname()
    # main restart loop: one iteration per solver invocation
    while redo:
        startNr += 1
        print_()
        print_("Starting restart nr", startNr, "on case", casePath)
        print_()
        self.addToCaseLog(casePath, "Restart nr", startNr, "started")
        run = AnalyzedRunner(BoundingLogAnalyzer(
            progress=self.opts.progress,
            doFiles=self.opts.writeFiles,
            singleFile=self.opts.singleDataFilesOnly,
            doTimelines=True),
                             silent=self.opts.progress or self.opts.silent,
                             splitThres=self.opts.splitDataPointsThreshold
                             if self.opts.doSplitDataPoints else None,
                             argv=args,
                             server=self.opts.server,
                             lam=lam,
                             logname=self.opts.logname,
                             compressLog=self.opts.compress,
                             logTail=self.opts.logTail,
                             noLog=self.opts.noLog,
                             remark=self.opts.remark,
                             parameters=self.getRunParameters(),
                             echoCommandLine=self.opts.echoCommandPrefix,
                             jobId=self.opts.jobId)
        run.createPlots(customRegexp=self.lines_,
                        splitThres=self.opts.splitDataPointsThreshold
                        if self.opts.doSplitDataPoints else None,
                        writeFiles=self.opts.writeFiles)
        # hook up the curses-UI (if one is active) with this runner
        if self.cursesWindow:
            self.cursesWindow.setAnalyzer(run.analyzer)
            self.cursesWindow.setRunner(run)
            run.analyzer.addTimeListener(self.cursesWindow)
        self.addWriteAllTrigger(run, SolutionDirectory(casePath, archive=None))
        self.addLibFunctionTrigger(
            run, SolutionDirectory(casePath, archive=None))
        self.runPreHooks()
        if self.blink1:
            run.addTicker(lambda: self.blink1.ticToc())
        run.start()
        # a keyboard interrupt by the user always ends the restart loop
        if run.data["keyboardInterrupt"]:
            print_()
            self.warning("Not restarting because of keyboard interrupt")
            redo = False
        self.setData({startNr: run.data})
        self.runPostHooks()
        self.reportUsage(run)
        self.reportRunnerData(run)
        self.addToCaseLog(casePath, "Restart nr", startNr, "ended")
        reason = checkRestart(data=run.data)
        if reason is not None:
            print_()
            self.warning("Not starting:", reason)
            self.addToCaseLog(casePath, "Stopping because of", reason)
            redo = False
        if startNr >= self.opts.maximumRestarts:
            print_()
            self.warning("Maximum number", self.opts.maximumRestarts,
                         "restarts reached")
            self.addToCaseLog(casePath, "Stopping because maximum number",
                              self.opts.maximumRestarts,
                              "of restarts reached")
            redo = False
    self.stopBlink()
    self.addToCaseLog(casePath, "Ended")
    print_()
    print_("Ended after", startNr, "restarts")
    print_()
def main():
    """Command-line entry point of the wind-field batch runner.

    Parses the command line, configures logging, reads the batch
    controlfile and then runs the CFD solver once for every combination
    of wind direction and wind speed, archiving the requested result
    fields after each run.  Exits the process with a non-zero status on
    any error (fixed: several error paths used bare ``sys.exit()``,
    which exits with status 0).
    """
    parser = OptionParser(usage=usage, version=version)
    parser.add_option("-q", "--quiet",
                      action="store_true", dest="quiet", default=False,
                      help="Only print warnings and errors")
    parser.add_option("-t", "--template",
                      action="store", dest="controlfile", default=None,
                      help="Generate default controlfile")
    parser.add_option("-l", "--logfile",
                      action="store", dest="logfile", default=None,
                      help="Writes output to logfile")
    parser.add_option("-d", "--debug",
                      action="store_true", dest="debug", default=False,
                      help="Writes output to logfile")
    parser.add_option("-c", "--case",
                      action="store", dest="case", default=None,
                      help="Specifies case directory")
    (options, args) = parser.parse_args()

    # --- logging setup --------------------------------------------------
    rootLogger = logging.getLogger('')
    logger = logging.getLogger('windRunner')
    reportLevel = logging.INFO
    if options.quiet:
        reportLevel = logging.WARNING
    if options.debug:
        reportLevel = logging.DEBUG
    rootLogger.setLevel(reportLevel)

    if options.logfile is None:
        # No logfile requested: report on the console instead.
        console = logging.StreamHandler()
        console.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        rootLogger.addHandler(console)

    if options.controlfile is not None:
        # Template mode: write a default controlfile and quit.
        generateCf(options.controlfile, defaultCf)
        print("Wrote default controlfile")
        sys.exit(0)

    if options.logfile is not None:
        logFileName = path.abspath(options.logfile)
        if not path.exists(path.dirname(logFileName)):
            print("Bad argument, directory for logfile does not exist")
            sys.exit(1)
        logfile = logging.FileHandler(logFileName, "w")
        logfile.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        logfile.setFormatter(formatter)
        rootLogger.addHandler(logfile)

    if len(args) != 1:
        parser.error("Incorrect number of arguments")

    # --- read the batch controlfile --------------------------------------
    cf = ControlFile.ControlFile(fileName=path.abspath(args[0]))
    if options.case is not None:
        casePath = path.abspath(options.case)
    else:
        casePath = os.getcwd()
    caseName = path.basename(casePath)
    ch = CaseHandler.CaseHandler(casePath)

    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    inlet_z0 = cf.findScalarList("z0:", optional=False)
    # One inlet roughness length (z0) per wind direction, paired by position.
    z0Dict = dict(zip(wdirs, inlet_z0))
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    archiveDirName = cf.findString("flowArchiveDirName:", optional=False)
    restoreArchived = cf.findBoolean("restoreArchived:",
                                     optional=True, default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        # Assume 16 cores per node when CPUs is not given explicitly.
        nprocesses = 16 * nodes
    else:
        nprocesses = int(CPUs)
    # -----------------------------------
    solver = cf.findString("solver:", default="windFoam")
    initCmds = cf.findStringList("initialize:", default=["setLanduse"])
    flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(casePath)

    logger.info("Running windRunner.py")
    logger.info("Setup overview:")
    logger.info(25 * "-")
    logger.info("Case: " + caseName)
    logger.info(25 * "-")
    logger.info("Wind directions are: " + str(wdirs))
    logger.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    logger.info("Total number of runs: " + str(nruns))
    logger.info(25 * "-")
    logger.info("Number of iterations are: " + str(iterations))
    logger.info("Number of nodes are: " + str(nodes))
    logger.info("Fields to be archived: " + str(fieldsToArchive))
    logger.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    # uses include file from 0/include
    ABLConditions = ParameterFile(
        path.join(ch.name, '0', 'include', 'ABLConditions'))
    compression = controlDict.readParameter("writeCompression")
    if compression in ("compressed", "on"):
        # Results are written gzipped, so the archived files carry .gz names.
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
    else:
        filesToArchive = fieldsToArchive

    # booting lammachine for parallel execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")

    # Crude progress bookkeeping: assume ~20 s per iteration until a real
    # per-case timing is available.
    timeLeft = iterations * nruns * 20
    timeSpent = 0  # fixed: was the octal literal 05 (i.e. 5 seconds)
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    ch.backUpInitialFields()
    logger.info("Backup made of initial fields")

    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            controlDict.replaceParameter("writeInterval", str(iterations))
            logger.info("Running calculations for dir: " + str(wdir) +
                        " speed: " + str(wspeed))
            logger.info("Time left: " + str(timeLeft / 60.0) +
                        "min, Time spent: " + str(timeSpent / 60.0) + "min")
            logger.info("Estimated time for finish: " +
                        str(timeEstimated[:4]))
            logger.info("Cases finished: " + str(casesRun) +
                        " cases left: " + str(casesLeft))
            logger.info(" ")
            ch.clearResults()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            logger.info("restoreArchived = " + str(restoreArchived))
            if flowArchive.inArchive(dirName=dirName) and not restoreArchived:
                # Result already archived and no re-run requested: just
                # update the progress bookkeeping and move on.
                logger.info('Results already in archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue

            logger.info("...Modifying bc:s")
            ch.modWindDir(ch.initialDir(), wdir)
            logger.info("bc:s modified!")
            ABLConditions.replaceParameter("Uref", "%f" % wspeed)
            ABLConditions.replaceParameter("flowDir", dir2vec(wdir))
            ABLConditions.replaceParameter("z0", 'uniform %f' % z0Dict[wdir])

            # Case initialisation utilities (e.g. setLanduse).
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", casePath],
                                         silent=True, logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    logger.info("Successfully finished: %s" % initCmd)
                else:
                    logger.error("Error when running: %s" % initCmd)
                    sys.exit(1)

            if restoreArchived and \
                    flowArchive.inArchive(dirName=dirName):
                logger.info("Restoring archived flow fields")
                flowArchive.restore(dirName, ch.initialDir(), fieldsToArchive)
                for filename in fieldsToArchive:
                    flowArchive.getFile(
                        outputFile=path.join(ch.initialDir(), filename),
                        fileName=filename,
                        archiveDirName=dirName)
                logger.info("Restored archived flow fields!")

            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", casePath],
                        silent=True, logname="decomposePar")
                    logger.info("...Decomposing case to run on " +
                                str(Lam.cpuNr()) + " of processors")
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        logger.info("Case decomposed!")
                    else:
                        logger.error("Error when running decomposePar")
                        # fixed: bare sys.exit() exits with status 0
                        sys.exit(1)
                else:
                    logger.error("Error: Could not start lam-machine")
                    sys.exit(1)  # fixed: bare sys.exit() exits with status 0
            else:
                Lam = None
                logger.info("Serial Run chosen!")

            logger.info("...Running solver for wind field")
            windFoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(),
                argv=[solver, "-case", casePath],
                silent=True, lam=Lam, logname=solver)
            windFoamSolver.start()
            if windFoamSolver.runOK():
                logger.info("Iterations finished for solver")
            else:
                logger.error("Error while running solver")
                sys.exit(1)  # fixed: bare sys.exit() exits with status 0

            if nprocesses > 1:
                logger.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-latestTime", "-case", casePath],
                    silent=True, logname="reconstructPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    logger.info("reconstruction ready!")
                else:
                    logger.error("Error while running reconstructPar")
                    sys.exit(1)
                logger.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(casePath, "processor*"))
                logger.info("Removed decomposed mesh!")

            # Record solver convergence data for this run.
            runLabel = "wd_" + str(wdir) + "_ws_" + str(wspeed)
            for residual in ("linear_Ux", "linear_Uy",
                             "linear_k", "linear_epsilon"):
                convTable.addResidual(runLabel, solver, residual,
                                      casesRun + 1)
            for probe in ("U", "k", "epsilon", "p"):
                convTable.addProbes(runLabel, probe, casesRun + 1)

            logger.info("Archiving results from directory: %s"
                        % ch.latestDir())
            # save latest result files (fixed: loop variable no longer
            # shadows the builtin `file`)
            solFiles = [fname for fname in os.listdir(ch.latestDir())
                        if fname in filesToArchive]
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            for filename in solFiles:
                flowArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
            logger.info("Finished wdir: " + str(wdir) + " wspeed: " +
                        str(wspeed) + " Last iter = " + ch.getLast())
            logger.info(" ")

            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)
            ch.clearResults()
            logger.info("Cleared all result directories except: %s"
                        % (" ".join(ch.getTimes())))
            ch.restoreInitialFields()
            logger.info("Restored initial fields from backup copy")

    # restoring windData dictionary to original state
    ABLConditions.purgeFile()
    convTable.writeProbes()
    convTable.writeResiduals()
    logger.info("Residuals and probes from solver " +
                "written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    logger.info("Finished batch calculation!")
def run(self): config = ConfigParser.ConfigParser() files = self.parser.getArgs() good = config.read(files) # will work with 2.4 # if len(good)!=len(files): # print_("Problem while trying to parse files",files) # print_("Only ",good," could be parsed") # sys.exit(-1) benchName = config.get("General", "name") if self.opts.nameAddition != None: benchName += "_" + self.opts.nameAddition if self.opts.foamVersion != None: benchName += "_v" + self.opts.foamVersion isParallel = config.getboolean("General", "parallel") lam = None if isParallel: nrCpus = config.getint("General", "nProcs") machineFile = config.get("General", "machines") if not path.exists(machineFile): self.error("Machine file ", machineFile, "needed for parallel run") lam = LAMMachine(machineFile, nr=nrCpus) if lam.cpuNr() > nrCpus: self.error("Wrong number of CPUs: ", lam.cpuNr()) print_("Running parallel on", lam.cpuNr(), "CPUs") if config.has_option("General", "casesDirectory"): casesDirectory = path.expanduser( config.get("General", "casesDirectory")) else: casesDirectory = foamTutorials() if not path.exists(casesDirectory): self.error("Directory", casesDirectory, "needed with the benchmark cases is missing") else: print_("Using cases from directory", casesDirectory) benchCases = [] config.remove_section("General") for sec in config.sections(): print_("Reading: ", sec) skipIt = False skipReason = "" if config.has_option(sec, "skip"): skipIt = config.getboolean(sec, "skip") skipReason = "Switched off in file" if self.opts.excases != None and not skipIt: for p in self.opts.excases: if fnmatch(sec, p): skipIt = True skipReason = "Switched off by pattern '" + p + "'" if self.opts.cases != None: for p in self.opts.cases: if fnmatch(sec, p): skipIt = False skipReason = "" if skipIt: print_("Skipping case ..... 
Reason:" + skipReason) continue sol = config.get(sec, "solver") cas = config.get(sec, "case") pre = eval(config.get(sec, "prepare")) preCon = [] if config.has_option(sec, "preControlDict"): preCon = eval(config.get(sec, "preControlDict")) con = eval(config.get(sec, "controlDict")) bas = config.getfloat(sec, "baseline") wei = config.getfloat(sec, "weight") add = [] if config.has_option(sec, "additional"): add = eval(config.get(sec, "additional")) print_("Adding: ", add) util = [] if config.has_option(sec, "utilities"): util = eval(config.get(sec, "utilities")) print_("Utilities: ", util) nr = 99999 if config.has_option(sec, "nr"): nr = eval(config.get(sec, "nr")) sp = None if config.has_option(sec, "blockSplit"): sp = eval(config.get(sec, "blockSplit")) toRm = [] if config.has_option(sec, "filesToRemove"): toRm = eval(config.get(sec, "filesToRemove")) setInit = [] if config.has_option(sec, "setInitial"): setInit = eval(config.get(sec, "setInitial")) parallelOK = False if config.has_option(sec, "parallelOK"): parallelOK = config.getboolean(sec, "parallelOK") deMet = ["metis"] if config.has_option(sec, "decomposition"): deMet = config.get(sec, "decomposition").split() if deMet[0] == "metis": pass elif deMet[0] == "simple": if len(deMet) < 2: deMet.append(0) else: deMet[1] = int(deMet[1]) else: print_("Unimplemented decomposition method", deMet[0], "switching to metis") deMet = ["metis"] if isParallel == False or parallelOK == True: if path.exists(path.join(casesDirectory, sol, cas)): benchCases.append( (nr, sec, sol, cas, pre, con, preCon, bas, wei, add, util, sp, toRm, setInit, deMet)) else: print_("Skipping", sec, "because directory", path.join(casesDirectory, sol, cas), "could not be found") else: print_("Skipping", sec, "because not parallel") benchCases.sort() parallelString = "" if isParallel: parallelString = ".cpus=" + str(nrCpus) resultFile = open( "Benchmark." + benchName + "." 
+ uname()[1] + parallelString + ".results", "w") totalSpeedup = 0 minSpeedup = None maxSpeedup = None totalWeight = 0 runsOK = 0 currentEstimate = 1. print_("\nStart Benching\n") csv = CSVCollection("Benchmark." + benchName + "." + uname()[1] + parallelString + ".csv") # csvHeaders=["description","solver","case","caseDir","base", # "benchmark","machine","arch","cpus","os","version", # "wallclocktime","cputime","cputimeuser","cputimesystem","maxmemory","cpuusage","speedup"] for nr, description, solver, case, prepare, control, preControl, base, weight, additional, utilities, split, toRemove, setInit, decomposition in benchCases: # control.append( ("endTime",-2000) ) print_("Running Benchmark: ", description) print_("Solver: ", solver) print_("Case: ", case) caseName = solver + "_" + case + "_" + benchName + "." + uname( )[1] + ".case" print_("Short name: ", caseName) caseDir = caseName + ".runDir" csv["description"] = description csv["solver"] = solver csv["case"] = case csv["caseDir"] = caseDir csv["base"] = base csv["benchmark"] = benchName csv["machine"] = uname()[1] csv["arch"] = uname()[4] if lam == None: csv["cpus"] = 1 else: csv["cpus"] = lam.cpuNr() csv["os"] = uname()[0] csv["version"] = uname()[2] workDir = path.realpath(path.curdir) orig = SolutionDirectory(path.join(casesDirectory, solver, case), archive=None, paraviewLink=False) for a in additional + utilities: orig.addToClone(a) orig.cloneCase(path.join(workDir, caseDir)) if oldApp(): argv = [solver, workDir, caseDir] else: argv = [solver, "-case", path.join(workDir, caseDir)] run = BasicRunner(silent=True, argv=argv, logname="BenchRunning", lam=lam) runDir = run.getSolutionDirectory() controlFile = ParameterFile(runDir.controlDict()) for name, value in preControl: print_("Setting parameter", name, "to", value, "in controlDict") controlFile.replaceParameter(name, value) for rm in toRemove: fn = path.join(caseDir, rm) print_("Removing file", fn) remove(fn) for field, bc, val in setInit: print_("Setting", 
field, "on", bc, "to", val) SolutionFile(runDir.initialDir(), field).replaceBoundary(bc, val) oldDeltaT = controlFile.replaceParameter("deltaT", 0) for u in utilities: print_("Building utility ", u) execute("wmake 2>&1 >%s %s" % (path.join( caseDir, "BenchCompile." + u), path.join(caseDir, u))) print_("Preparing the case: ") if lam != None: prepare = prepare + [("decomposePar", "")] if decomposition[0] == "metis": lam.writeMetis( SolutionDirectory(path.join(workDir, caseDir))) elif decomposition[0] == "simple": lam.writeSimple( SolutionDirectory(path.join(workDir, caseDir)), decomposition[1]) if split: print_("Splitting the mesh:", split) bm = BlockMesh(runDir.blockMesh()) bm.refineMesh(split) for pre, post in prepare: print_("Doing ", pre, " ....") post = post.replace("%case%", caseDir) if oldApp(): args = string.split("%s %s %s %s" % (pre, workDir, caseDir, post)) else: args = string.split( "%s -case %s %s" % (pre, path.join(workDir, caseDir), post)) util = BasicRunner(silent=True, argv=args, logname="BenchPrepare_" + pre) util.start() controlFile.replaceParameter("deltaT", oldDeltaT) # control.append(("endTime",-1000)) for name, value in control: print_("Setting parameter", name, "to", value, "in controlDict") controlFile.replaceParameter(name, value) print_("Starting at ", asctime(localtime(time()))) print_( " Baseline is %f, estimated speedup %f -> estimated end at %s " % (base, currentEstimate, asctime(localtime(time() + base / currentEstimate)))) print_("Running the case ....") run.start() speedup = None cpuUsage = 0 speedupOut = -1 try: speedup = base / run.run.wallTime() cpuUsage = 100. * run.run.cpuTime() / run.run.wallTime() except ZeroDivisionError: print_("Division by Zero: ", run.run.wallTime()) if not run.runOK(): print_("\nWARNING!!!!") print_( "Run had a problem, not using the results. 
Check the log\n" ) speedup = None if speedup != None: speedupOut = speedup totalSpeedup += speedup * weight totalWeight += weight runsOK += 1 if maxSpeedup == None: maxSpeedup = speedup elif speedup > maxSpeedup: maxSpeedup = speedup if minSpeedup == None: minSpeedup = speedup elif speedup < minSpeedup: minSpeedup = speedup print_("Wall clock: ", run.run.wallTime()) print_("Speedup: ", speedup, " (Baseline: ", base, ")") print_("CPU Time: ", run.run.cpuTime()) print_("CPU Time User: "******"CPU Time System: ", run.run.cpuSystemTime()) print_("Memory: ", run.run.usedMemory()) print_("CPU Usage: %6.2f%%" % (cpuUsage)) csv["wallclocktime"] = run.run.wallTime() csv["cputime"] = run.run.cpuTime() csv["cputimeuser"] = run.run.cpuUserTime() csv["cputimesystem"] = run.run.cpuSystemTime() csv["maxmemory"] = run.run.usedMemory() csv["cpuusage"] = cpuUsage if speedup != None: csv["speedup"] = speedup else: csv["speedup"] = "##" csv.write() resultFile.write( "Case %s WallTime %g CPUTime %g UserTime %g SystemTime %g Memory %g MB Speedup %g\n" % (caseName, run.run.wallTime(), run.run.cpuTime(), run.run.cpuUserTime(), run.run.cpuSystemTime(), run.run.usedMemory(), speedupOut)) resultFile.flush() if speedup != None: currentEstimate = totalSpeedup / totalWeight if self.opts.removeCases: print_("Clearing case", end=" ") if speedup == None: print_("not ... because it failed") else: print_("completely") rmtree(caseDir, ignore_errors=True) print_() print_()