def execute(self, para, log):
    """Run the configured utility on the given case and scrape its output.

    :param para: mapping with at least a 'case' entry naming the case
    :param log: log object (unused here; kept for interface compatibility)
    :return: tuple ``(runOK, result)`` where ``runOK`` is the runner's
             success flag and ``result`` is a list of captured items or None
    """
    argv = [self.utility, ".", para['case']] + self.options.split()
    print_(" Executing and analyzing", " ".join(argv), end=" ")
    sys.stdout.flush()
    run = UtilityRunner(argv,
                        silent=True,
                        lam=Command.parallel,
                        logname="_".join(argv))
    # capture every output line matching the configured regular expression
    run.add("data", self.regexp)
    run.start()
    data = run.analyzer.getData("data")
    result = None
    if data is not None:
        # copy into a plain list so callers are decoupled from the analyzer
        result = list(data)
    if result is None:
        print_("no data", end=" ")
    else:
        print_(result, end=" ")
    if run.runOK():
        print_()
    else:
        print_("---> there was a problem")
    return run.runOK(), result
def RunUtilities(self, sense='single'):
    """Extract the pressure and temperature differences from the case.

    Two external post-processing utilities are run on the latest time and
    their log output is parsed.

    :param sense: 'multi' returns ``(deltaT, deltaP)``, anything else
                  returns only ``deltaT``
    """
    # pressure difference via the external pressure utility
    pressureRunner = UtilityRunner(
        argv=[self.pCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="Pressure")
    pressureRunner.add("PressureDifference",
                       "Pressure drop = (%f%) between inlet and outlet",
                       idNr=1)
    pressureRunner.start()
    deltaP = UtilityRunner.get(pressureRunner, "PressureDifference")[0]

    # temperature difference via the external temperature utility
    temperatureRunner = UtilityRunner(
        argv=[self.tCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="Temperature")
    temperatureRunner.add("TemperatureDifference",
                          "Temperature drop = (%f%) between inlet and outlet",
                          idNr=1)
    temperatureRunner.start()
    deltaT = UtilityRunner.get(temperatureRunner, "TemperatureDifference")[0]

    return (float(deltaT), float(deltaP)) if sense == "multi" else float(deltaT)
def RunUtilities(self, sense='single'):
    """Extract the pressure drop (and, for 'multi', the outlet mass flow).

    :param sense: 'multi' returns ``(deltaP, massFlow)``; anything else
                  returns only ``deltaP``
    """
    # Get the pressure difference (Using an external utility)
    pressureRunner = UtilityRunner(
        argv=[self.pCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="Pressure")
    pressureRunner.add("PressureDifference",
                       "Pressure drop = (%f%) between inlet and outlet",
                       idNr=1)
    pressureRunner.start()
    deltaP = UtilityRunner.get(pressureRunner, "PressureDifference")[0]

    if sense != "multi":
        return float(deltaP)

    # Get the mass flow (Using an external utility) -- multi mode only
    flowRunner = UtilityRunner(
        argv=[self.mCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="MassFlow")
    flowRunner.add("mass", "Flux at outlet = (%f%)", idNr=1)
    flowRunner.start()
    massFlow = UtilityRunner.get(flowRunner, "mass")[0]
    return float(deltaP), float(massFlow)
def RunUtilities():
    """Run the particle solver for one timestep, record the separation
    efficiency to ``efficiency.txt`` and clean up redundant directories.

    Reads the module-level ``platform`` flag to decide which time
    directory and clearing threshold apply.
    """
    current = os.getcwd()
    solver1 = "icoUncoupledKinematicCustomInteractionFoam"
    # run parcelFoam for one timestep:
    mCalculated = UtilityRunner(argv=[solver1, "-case", current],
                                silent=True,
                                logname="ParticleEscape")
    print(
        "RunUtilities(x): icoUncoupledKinematicCustomInteractionFoam run for 1 timestep"
    )
    mCalculated.add("massEscape", "- escape = outflow_top = (%f%)", idNr=1)
    mCalculated.add("massIntroduced", "- mass introduced = (%f%)", idNr=1)
    mCalculated.start()
    massEscape1 = UtilityRunner.get(mCalculated, "massEscape")[0]
    print("RunUtilities(): read massEscape to a number", str(massEscape1))
    massIntro1 = UtilityRunner.get(mCalculated, "massIntroduced")[0]
    print("RunUtilities(): read massIntroduced to a number", str(massIntro1))

    # the two platform families only differ in which time directory is
    # removed and where pyFoamClearCase starts clearing
    if platform in ('isambard_test', 'isca_test'):
        timeDir, after = '0.011', '0.01'
    else:
        timeDir, after = '262.001', '262'
    subprocess.call(['rm', '-rf', os.path.join(current, timeDir)])
    print("RunUtilities(): removed %s directory" % timeDir)
    # remove redundant files
    subprocess.call([
        'pyFoamClearCase.py', current, '--after=' + after,
        '--processors-remove'
    ])
    subprocess.call(['rm', '-rf', os.path.join(current, '0')])
    print("RunUtilities(): removed processor directories and 0 directory")

    # separation efficiency = retained mass fraction
    efficiency = (float(massIntro1) - float(massEscape1)) / float(massIntro1)
    with open(os.path.join(current, "efficiency.txt"), "a") as myfile_efficiency:
        myfile_efficiency.write(str(efficiency) + '\n')
    print("RunUtilities(): written efficiency to a file")
def setUp(self):
    """Clone the damBreak tutorial into a fresh scratch case and mesh it."""
    # use a unique temporary path instead of the fixed /tmp/TestDamBreak so
    # repeated or parallel test runs cannot collide on a stale directory
    # (matches the sibling setUp implementation)
    from tempfile import mktemp
    self.dest = mktemp()
    SolutionDirectory(damBreakTutorial(),
                      archive=None,
                      paraviewLink=False).cloneCase(self.dest)
    if oldApp():
        # older Foam versions take <root> <case> instead of -case <dir>
        pathSpec = [path.dirname(self.dest), path.basename(self.dest)]
    else:
        pathSpec = ["-case", self.dest]
    run = UtilityRunner(argv=["blockMesh"] + pathSpec,
                        silent=True,
                        server=False)
    run.start()
def setUp(self):
    """Prepare a scratch clone of the damBreak tutorial and run blockMesh."""
    self.dest = mktemp()
    template = SolutionDirectory(damBreakTutorial(),
                                 archive=None,
                                 paraviewLink=False)
    template.cloneCase(self.dest)
    # older Foam versions take <root> <case> instead of -case <dir>
    caseArgs = ([path.dirname(self.dest), path.basename(self.dest)]
                if oldApp() else ["-case", self.dest])
    mesher = UtilityRunner(argv=["blockMesh"] + caseArgs,
                           silent=False,
                           server=False)
    mesher.start()
def run(self):
    """Run the utility, registering one analyzer per user-supplied regexp,
    and report where each expression's captured output went.

    Publishes the collected data via ``setData``.
    """
    if self.opts.regexp is None:
        self.parser.error("Regular expression needed")
    cName = self.parser.casePath()
    run = UtilityRunner(argv=self.parser.getArgs(),
                        silent=self.opts.silent,
                        server=True)
    for i, r in enumerate(self.opts.regexp):
        name = self.opts.name
        # suffix the analyzer name only when several expressions are given
        if len(self.opts.regexp) > 1:
            name = "%s_%d" % (name, i)
        run.add(name, r)
    self.addToCaseLog(cName, "Starting")
    run.start()
    self.addToCaseLog(cName, "Ending")
    allData = run.data
    for i, r in enumerate(self.opts.regexp):
        name = self.opts.name
        if len(self.opts.regexp) > 1:
            name = "%s_%d" % (name, i)
        fn = path.join(run.getDirname(), name)
        data = run.analyzer.getData(name)
        allData["analyzed"][name] = data
        if data is None:
            # print_ used for Python 2/3 compatibility (matches the rest
            # of the file; this block previously used py2 print statements)
            print_(sys.argv[0] + ": No data found for expression", r)
        else:
            if self.opts.echo:
                with open(fn) as fh:
                    print_(fh.read())
            else:
                print_(sys.argv[0] + ": Output written to file " + fn)
    self.setData(allData)
def run(self):
    """Execute the wrapped utility, capturing output for every supplied
    regular expression, and report the captured data afterwards."""
    expressions = self.opts.regexp
    if expressions is None:
        self.parser.error("Regular expression needed")
    caseName = self.parser.casePath()
    runner = UtilityRunner(argv=self.parser.getArgs(),
                           silent=self.opts.silent,
                           server=True)

    def analyzerName(index):
        # suffix the name only when several expressions are given
        if len(expressions) > 1:
            return "%s_%d" % (self.opts.name, index)
        return self.opts.name

    for i, expr in enumerate(expressions):
        runner.add(analyzerName(i), expr)
    self.addToCaseLog(caseName, "Starting")
    runner.start()
    self.addToCaseLog(caseName, "Ending")
    collected = runner.data
    for i, expr in enumerate(expressions):
        name = analyzerName(i)
        outFile = path.join(runner.getDirname(), name)
        found = runner.analyzer.getData(name)
        collected["analyzed"][name] = found
        if found is None:
            print_(sys.argv[0] + ": No data found for expression", expr)
        elif self.opts.echo:
            fh = open(outFile)
            print_(fh.read())
            fh.close()
        else:
            print_(sys.argv[0] + ": Output written to file " + outFile)
    self.setData(collected)
def RunUtilities(self, sense='multi'):
    """Extract the two total pressure drops (Inlet->Outlet1/Outlet2).

    :param sense: 'multi' returns both drops as a 2-tuple; anything else
                  returns the larger magnitude as a plain scalar
    """
    # Get the pressure differences (Using an external utility)
    pUtil = UtilityRunner(
        argv=[self.pCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="Pressure")
    pUtil.add("PressureDifference",
              "Total pressure drop = (%f%) between Inlet and Outlet1",
              idNr=1)
    pUtil.add("PressureDifference2",
              "Total pressure drop = (%f%) between Inlet and Outlet2",
              idNr=1)
    pUtil.start()
    deltaP = UtilityRunner.get(pUtil, "PressureDifference")[0]
    deltaP2 = UtilityRunner.get(pUtil, "PressureDifference2")[0]
    if sense == "multi":
        return float(deltaP), float(deltaP2)
    # scalar branch: a stray trailing comma previously turned this return
    # into a 1-tuple, unlike the sibling RunUtilities methods which return
    # a bare scalar; builtin abs suffices for plain floats
    return max(abs(float(deltaP)), abs(float(deltaP2)))
def run(self):
    """Run the wrapped utility once per selected time-step and gather the
    per-step runner data into a list (also published via setData)."""
    caseName = self.parser.casePath()
    times = self.processTimestepOptions(
        SolutionDirectory(caseName, archive=None))
    if not times:
        self.warning("Can't continue without time-steps")
        return
    lam = self.getParallel(SolutionDirectory(caseName, archive=None))
    collected = []
    for t in times:
        print_(" Running for t=", t)
        # one runner (and one log file) per time-step
        logName = "%s.%s.t=%s" % (self.opts.logname,
                                  self.parser.getApplication(), t)
        runner = UtilityRunner(
            argv=self.parser.getArgs() + ["-time", t],
            silent=self.opts.progress or self.opts.silent,
            server=self.opts.server,
            logname=logName,
            compressLog=self.opts.compress,
            logTail=self.opts.logTail,
            noLog=self.opts.noLog,
            echoCommandLine=self.opts.echoCommandPrefix,
            lam=lam)
        self.addToCaseLog(caseName, "Starting for t=%s", t)
        runner.start()
        self.setData({t: runner.data})
        self.addToCaseLog(caseName, "Ending")
        self.reportUsage(runner)
        self.reportRunnerData(runner)
        collected.append(runner.data)
    return collected
def prepare(self):
    """Mesh the case (one or several mesher utilities) and, for parallel
    runs (np != 1), decompose it.

    ``self.mesher`` may be a single utility name or an iterable of names.

    :raises RuntimeError: if any mesher step or the decomposition fails
    """
    # normalize the single-utility form to a list of (step, error message)
    # pairs; the messages are kept exactly as before for both forms
    if isinstance(self.mesher, str):
        steps = [(self.mesher, "Failed running mesher")]
    else:
        steps = [(step, f"Failed running mesher {step}")
                 for step in self.mesher]
    for step, errorMessage in steps:
        mesher = UtilityRunner(
            argv=[step, "-case", self.dire.name],
            silent=True,
            logname=step,
        )
        mesher.start()
        if not mesher.data["OK"]:
            raise RuntimeError(errorMessage)
    if self.np != 1:
        decomposer = UtilityRunner(
            argv=["decomposePar", "-case", self.dire.name],
            silent=True,
            logname="decomposePar",
        )
        decomposer.start()
        if not decomposer.data["OK"]:
            raise RuntimeError("Failed decomposing case")
def RunUtilities(self, sense='single'):
    """Copy the last time-step back to the start, clear the case, and
    extract the negated pressure drop (plus negated mass flow for 'multi').

    :param sense: 'multi' additionally returns the outlet mass flux
    :return: ``-deltaP``, or ``(-deltaP, -massFlow)`` when sense == 'multi'
    """
    # (the previous revision computed an unused N from
    # os.path.isdir(self.case_path + "10000") and kept two unused list
    # locals; all were dead code and have been dropped)
    subprocess.call(
        ['pyFoamCopyLastToFirst.py', self.case_path, self.case_path])
    subprocess.call([
        'pyFoamClearCase.py', self.case_path, '--processors-remove',
        '--keep-postprocessing'
    ])
    # Get the pressure difference (Using an external utility)
    pUtil = UtilityRunner(
        argv=[self.pCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="Pressure")
    pUtil.add("PressureDifference",
              "Pressure drop = (%f%) between inlet and outlet",
              idNr=1)
    pUtil.start()
    deltaP = UtilityRunner.get(pUtil, "PressureDifference")[0]
    if sense != "multi":
        # note the negated sign -- differs from the sibling RunUtilities
        # implementations in this file
        return -float(deltaP)
    # Get the mass flow (Using an external utility)
    mUtil = UtilityRunner(
        argv=[self.mCmd, "-case", self.case_path, "-latestTime"],
        silent=True,
        logname="MassFlow")
    mUtil.add("mass", "Flux at outlet = (%f%)", idNr=1)
    mUtil.start()
    massFlow = UtilityRunner.get(mUtil, "mass")[0]
    return -float(deltaP), -float(massFlow)
def run(self):
    """Run the wrapped utility separately for each selected time-step.

    Collects every invocation's runner data into a list (also published
    incrementally via setData, keyed by the time value).
    """
    cName=self.parser.casePath()
    times=self.processTimestepOptions(SolutionDirectory(cName,archive=None))
    if len(times)<1:
        self.warning("Can't continue without time-steps")
        return
    # parallel execution setup derived from the solution directory
    lam=self.getParallel(SolutionDirectory(cName,archive=None))
    data=[]
    for i,t in enumerate(times):
        print_(" Running for t=",t)
        # one runner (and one distinctly named log file) per time-step
        run=UtilityRunner(argv=self.parser.getArgs()+["-time",t],
                          silent=self.opts.progress or self.opts.silent,
                          server=self.opts.server,
                          logname="%s.%s.t=%s" % (self.opts.logname,self.parser.getApplication(),t),
                          compressLog=self.opts.compress,
                          logTail=self.opts.logTail,
                          noLog=self.opts.noLog,
                          lam=lam)
        self.addToCaseLog(cName,"Starting for t=%s",t)
        run.start()
        # publish this step's data keyed by its time value
        self.setData({t:run.data})
        self.addToCaseLog(cName,"Ending")
        self.reportUsage(run)
        self.reportRunnerData(run)
        data.append(run.data)
    return data
def execute(self,para,log):
    """Execute the configured utility on ``para['case']`` and analyze its
    output with the configured regular expression.

    :param para: mapping with at least a 'case' entry
    :param log: log object (unused here; kept for interface compatibility)
    :return: ``(runOK, result)`` -- result is a list of matches or None
    """
    argv=[self.utility,".",para['case']]+self.options.split()
    print_(" Executing and analyzing"," ".join(argv),end=" ")
    sys.stdout.flush()
    run=UtilityRunner(argv,silent=True,lam=Command.parallel,logname="_".join(argv))
    run.add("data",self.regexp)
    run.start()
    data=run.analyzer.getData("data")
    result=None
    if data is not None:
        # detach the matches from the analyzer's internal storage
        result=list(data)
    if result is None:
        print_("no data",end=" ")
    else:
        print_(result,end=" ")
    if run.runOK():
        print_()
    else:
        print_("---> there was a problem")
    return run.runOK(),result
def main():
    """Batch-run speciesFoam over every (wind speed, wind direction) pair.

    For each pair the archived flow fields are restored, boundary
    conditions are adjusted, the solver is run (optionally in parallel via
    LAM) and the resulting concentration fields are archived together with
    convergence information.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-t", "--template", action="store", dest="template",
                        help="Generate default controlfile")
    parser.add_argument(
        '-v',
        action=VerboseAction,
        dest='loglevel',
        default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l',
        metavar='logfile',
        action=LogFileAction,
        dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c",
        "--case",
        action="store",
        dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(action="store", dest="controlfile",
                        help="Controlfile for speciesRunner")
    args = parser.parse_args()
    # -t short-circuits: write a template controlfile and quit
    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)
    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)
    # required and optional settings from the controlfile
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString("flowArchiveDirName:", optional=True,
                                       default='flowArchive')
    concArchiveDirName = cf.findString("concArchiveDirName:", optional=True,
                                       default='concArchive')
    restoreArchived = cf.findBoolean("restoreArchived:", optional=True,
                                     default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    # explicit CPU count overrides the nodes-based default
    if CPUs is None:
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)
    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)
    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")
    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict"))
    # when compressed output is configured, archived files carry a .gz suffix
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES
    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")
    # crude run-time bookkeeping (20 s per iteration as initial guess)
    timeLeft = iterations * nruns * 20
    # NOTE(review): "05" is a Python-2 octal literal (value 5) and is a
    # syntax error under Python 3 -- confirm the intended value
    timeSpent = 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # skip cases whose results are already archived (unless a
            # restore was explicitly requested)
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info(
                    'Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            # cannot run without the precomputed flow fields
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..."
                            % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info("Running calculations for dir: " + str(wdir) +
                     " speed: " + str(wspeed))
            log.info("Time left: " + str(timeLeft / 60.0) +
                     "min, Time spent: " + str(timeSpent / 60.0) + "min")
            log.info("Estimated time for finish: " + str(timeEstimated[:4]))
            log.info("Cases finished: " + str(casesRun) + " cases left: " +
                     str(casesLeft))
            log.info(" ")
            ch.clearResults()
            log.info("...Modifying bc:s")
            # remove stale flow fields before restoring the archived ones
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            # for filename in flowFiles:
            #     flowArchive.getFile(
            #         outputFile=path.join(ch.initialDir(), filename),
            #         fileName=filename, archiveDirName=dirName
            #     )
            log.info("Restored archived flow fields!")
            # optional user-supplied initialization utilities
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", args.case],
                                         silent=True,
                                         logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    log.info("Successfully finished: %s" % initCmd)
                else:
                    log.error("Error when running: %s" % initCmd)
                    sys.exit(1)
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True,
                        logname="decomposePar")
                    log.info("Decomposing case for %i processors" %
                             Lam.cpuNr())
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit()
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                log.info("Serial Run chosen!")
            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(StandardLogAnalyzer(),
                                           argv=[solver, "-case", args.case],
                                           silent=True,
                                           lam=Lam,
                                           logname=solver)
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit()
            # parallel runs need reconstruction before archiving
            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True,
                    logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("recunstruction ready!")
                else:
                    log.error("Error while running recontructPar")
                    sys.exit()
                log.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(args.case, "processor*"))
                log.info("Removed decomposed mesh!")
            iterationsReady = (int(ch.getLast()) -
                               int(path.basename(ch.initialDir())))
            if iterationsReady < iterations:
                log.warning("Run was aborted before finalizing" +
                            " the wanted number of iterations")
                log.warning("Guessing that nan:s were present in results. " +
                            "Removing results from current run and moving on")
            log.info("Archiving results")
            # save latest concentration result files
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default" and
                ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
                convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                      "speciesFoam", "linear_" + filename,
                                      casesRun + 1)
                convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                    filename, casesRun + 1)
                log.info("Residuals and probes from solver " +
                         "speciesFoam added to convergence table")
            # Adding the list of names of archived concentration
            # files to the statisticsDict dictionary
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList("concFileList",
                                                archivedConcFiles)
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s" %
                     (wdir, wspeed, ch.getLast()))
            log.info(" ")
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)
            ch.clearResults()
            ch.restoreInitialFields()
            log.info("Restored initital fields")
            # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    log.info("Residuals and probes from solver windFoam " +
             "written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def main():
    """Batch-run speciesFoam over every (wind speed, wind direction) pair.

    For each pair the archived flow fields are restored, boundary
    conditions are adjusted, the solver is run (optionally in parallel via
    LAM) and the resulting concentration fields are archived together with
    convergence information.

    Fixes applied: ``timeSpent = 05`` (a Python-2 octal literal, a syntax
    error under Python 3) is now ``5``; error paths exit with status 1
    instead of 0; reconstructPar log-name/message typos corrected.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "-t", "--template", action="store", dest="template",
        help="Generate default controlfile"
    )
    parser.add_argument(
        '-v',
        action=VerboseAction,
        dest='loglevel',
        default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l',
        metavar='logfile',
        action=LogFileAction,
        dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c",
        "--case",
        action="store",
        dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(
        action="store", dest="controlfile", help="Controlfile for speciesRunner"
    )
    args = parser.parse_args()
    # -t short-circuits: write a template controlfile and quit
    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)
    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)
    # required and optional settings from the controlfile
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString(
        "flowArchiveDirName:", optional=True, default='flowArchive'
    )
    concArchiveDirName = cf.findString(
        "concArchiveDirName:", optional=True, default='concArchive'
    )
    restoreArchived = cf.findBoolean(
        "restoreArchived:", optional=True, default=False
    )
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    # explicit CPU count overrides the nodes-based default
    if CPUs is None:
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)
    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)
    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")
    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict")
    )
    # when compressed output is configured, archived files carry a .gz suffix
    # NOTE(review): filesToArchive is computed but not used below -- confirm
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES
    # booting lammachine for parallel execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")
    # crude run-time bookkeeping (20 s per iteration as initial guess)
    timeLeft = iterations * nruns * 20
    timeSpent = 5  # was the py2 octal literal "05" (== 5)
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # skip cases whose results are already archived (unless a
            # restore was explicitly requested)
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info('Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            # cannot run without the precomputed flow fields
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..."
                            % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info(
                "Running calculations for dir: " + str(wdir) +
                " speed: " + str(wspeed)
            )
            log.info(
                "Time left: " + str(timeLeft / 60.0) +
                "min, Time spent: " + str(timeSpent / 60.0) + "min"
            )
            log.info(
                "Estimated time for finish: " + str(timeEstimated[:4])
            )
            log.info(
                "Cases finished: " + str(casesRun) +
                " cases left: " + str(casesLeft)
            )
            log.info(" ")
            ch.clearResults()
            log.info("...Modifying bc:s")
            # remove stale flow fields before restoring the archived ones
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            # for filename in flowFiles:
            #     flowArchive.getFile(
            #         outputFile=path.join(ch.initialDir(), filename),
            #         fileName=filename, archiveDirName=dirName
            #     )
            log.info("Restored archived flow fields!")
            # optional user-supplied initialization utilities
            for initCmd in initCmds:
                initUtil = UtilityRunner(
                    argv=[initCmd, "-case", args.case],
                    silent=True,
                    logname=initCmd
                )
                initUtil.start()
                if initUtil.runOK():
                    log.info("Successfully finished: %s" % initCmd)
                else:
                    log.error("Error when running: %s" % initCmd)
                    sys.exit(1)
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True,
                        logname="decomposePar"
                    )
                    log.info(
                        "Decomposing case for %i processors" % Lam.cpuNr()
                    )
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit(1)
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit(1)
            else:
                Lam = None
                log.info("Serial Run chosen!")
            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(),
                argv=[solver, "-case", args.case],
                silent=True,
                lam=Lam,
                logname=solver
            )
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit(1)
            # parallel runs need reconstruction before archiving
            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True,
                    logname="reconstructPar"
                )
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("reconstruction ready!")
                else:
                    log.error("Error while running reconstructPar")
                    sys.exit(1)
                log.info("Removing decomposed mesh")
                ch.execute(
                    "rm -r " + os.path.join(args.case, "processor*")
                )
                log.info("Removed decomposed mesh!")
            iterationsReady = (
                int(ch.getLast()) - int(path.basename(ch.initialDir()))
            )
            if iterationsReady < iterations:
                log.warning(
                    "Run was aborted before finalizing" +
                    " the wanted number of iterations"
                )
                log.warning(
                    "Guessing that nan:s were present in results. " +
                    "Removing results from current run and moving on"
                )
            log.info("Archiving results")
            # save latest concentration result files
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default" and
                ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(
                    path.join(ch.latestDir(), filename), dirName=dirName
                )
                convTable.addResidual(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    "speciesFoam", "linear_" + filename, casesRun + 1
                )
                convTable.addProbes(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    filename, casesRun + 1
                )
                log.info(
                    "Residuals and probes from solver " +
                    "speciesFoam added to convergence table"
                )
            # Adding the list of names of archived concentration
            # files to the statisticsDict dictionary
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList(
                "concFileList", archivedConcFiles
            )
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s" % (
                wdir, wspeed, ch.getLast())
            )
            log.info(" ")
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)
            ch.clearResults()
            ch.restoreInitialFields()
            log.info("Restored initial fields")
            # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    log.info(
        "Residuals and probes from solver windFoam " +
        "written to case/convergence directory"
    )
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def main(): parser = OptionParser(usage=usage, version=version) parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors") parser.add_option("-t", "--template", action="store", dest="controlfile", default=None, help="Generate default controlfile") parser.add_option("-l", "--logfile", action="store", dest="logfile", default=None, help="Writes output to logfile") parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Writes output to logfile") parser.add_option("-c", "--case", action="store", dest="case", default=None, help="Specifies case directory") (options, args) = parser.parse_args() rootLogger = logging.getLogger('') logger = logging.getLogger('canopyRunner') reportLevel = logging.INFO if options.quiet: reportLevel = logging.WARNING if options.debug: reportLevel = logging.DEBUG rootLogger.setLevel(reportLevel) if options.logfile == None: console = logging.StreamHandler() console.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) rootLogger.addHandler(console) if options.controlfile != None: generateCf(path.abspath(options.controlfile)) print "Wrote default controlfile" sys.exit() if options.logfile != None: logFileName = path.abspath(options.logfile) if not path.exists(path.dirname(logFileName)): print "Bad argument, directory for logfile does not exist" sys.exit() logfile = logging.FileHandler(logFileName, "w") logfile.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') logfile.setFormatter(formatter) rootLogger.addHandler(logfile) if len(args) != 1: parser.error("Incorrect number of arguments") cf = ControlFile.ControlFile(fileName=path.abspath(args[0])) if options.case != None: casePath = path.abspath(options.case) else: casePath = os.getcwd() caseName = path.basename(casePath) ch = CaseHandler.CaseHandler(casePath) wspeeds = 
cf.findScalarList("wspeeds:", optional=False) wdirs = cf.findScalarList("wdirs:", optional=False) iterations = cf.findScalar("iterations:", optional=False) inletProfile_z0 = cf.findScalar("z0:", optional=False) fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False) archiveDirName = cf.findString("flowArchiveDirName:", optional=False) restoreArchived = cf.findBoolean("restoreArchived:", optional=True, default=False) archiveVTK = cf.findBoolean("archiveVTK:", optional=False) VTKArchiveDir = cf.findExistingPath("VTKArchiveDir:", optional=False) nodes = int(cf.findScalar("nodes:", optional=False)) CPUs = cf.findScalar("CPUs:", optional=True) if CPUs == None: nprocesses = 8 * nodes else: nprocesses = int(CPUs) #----------------------------------- solver = cf.findString("solver:", default="windFoam") softStart = cf.findString("softstart_application:", optional=True) initCmds = cf.findStringList("initialize:", default=["setWindInlet"]) flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName) nwdir = len(wdirs) convTable = ConvergenceTable.ConvergenceTable(casePath) logger.info("Running windRunner.py") logger.info("Setup overview:") logger.info(25 * "-") logger.info("Case: " + caseName) logger.info(25 * "-") logger.info("Wind directions are: " + str(wdirs)) logger.info("Wind speeds are: " + str(wspeeds)) nruns = nwdir * len(wspeeds) logger.info("Total number of runs: " + str(nruns)) logger.info(25 * "-") logger.info("Number of iterations are: " + str(iterations)) logger.info("Number of nodes are: " + str(nodes)) logger.info("Fields to be archived: " + str(fieldsToArchive)) logger.info("ArchiveToVTK is set to: " + str(archiveVTK)) logger.info(50 * "=") controlDict = ParameterFile(ch.controlDict()) windDict = ParameterFile(path.join(ch.constantDir(), "windDict")) RASDict = ParameterFile(path.join(ch.constantDir(), "RASProperties")) compression = controlDict.readParameter("writeCompression") if compression == "compressed" or compression == "on": 
filesToArchive = [field + ".gz" for field in fieldsToArchive] else: filesToArchive = fieldsToArchive if not path.exists(VTKArchiveDir) and archiveVTK: logger.error("The VTKArchiveDir does not exist") sys.exit() #booting lammachine for parallell execution if nprocesses > 1: Lam = LAMMachine(nr=nprocesses) Lam.writeScotch(ch) controlDict.replaceParameter("stopAt", "nextWrite") timeLeft = iterations * nruns * 20 timeSpent = 05 timeCase = iterations * 20 timeEstimated = time.localtime(time.time() + timeLeft) casesRun = 0 casesLeft = nruns ch.backUpInitialFields() logger.info("Backup made of initial fields") for wspeed in wspeeds: for wdir in wdirs: timeInit = time.time() controlDict.replaceParameter("writeInterval", str(iterations)) logger.info("Running calculations for dir: " + str(wdir) + " speed: " + str(wspeed)) logger.info("Time left: " + str(timeLeft / 60.0) + "min, Time spent: " + str(timeSpent / 60.0) + "min") logger.info("Estimated time for finish: " + str(timeEstimated[:4])) logger.info("Cases finished: " + str(casesRun) + " cases left: " + str(casesLeft)) logger.info(" ") ch.clearResults() dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) logger.info("restoreArchived = " + str(restoreArchived)) if restoreArchived and flowArchive.inArchive(dirName=dirName): logger.info("Restoring archived flow fields") flowArchive.restore(dirName, fieldsToArchive, ch.initialDir()) for filename in fieldsToArchive: flowArchive.getFile(outputFile=path.join( ch.initialDir(), filename), fileName=filename, archiveDirName=dirName) logger.info("Restored archived flow fields!") else: logger.info("...Modifying bc:s") ch.modWindDir(ch.initialDir(), wdir) logger.info("bc:s modified!") logger.info("...Setting inlet profiles") windDict.replaceParameter("U10", str(wspeed)) windDict.replaceParameter("windDirection", str(wdir)) windDict.replaceParameter("z0", str(inletProfile_z0)) for initCmd in initCmds: initUtil = UtilityRunner(argv=[initCmd, "-case", casePath], silent=True, 
logname=initCmd) initUtil.start() if initUtil.runOK(): logger.info("Successfully finished: %s" % initCmd) else: logger.error("Error when running: %s" % initCmd) sys.exit() if nprocesses > 1: if Lam.machineOK(): decomposeCmd = "decomposePar" decomposeUtil = UtilityRunner( argv=[decomposeCmd, "-case", casePath], silent=True, logname="decomposePar") logger.info("...Decomposing case to run on" + str(Lam.cpuNr()) + str(" of processors")) decomposeUtil.start() if decomposeUtil.runOK(): logger.info("Case decomposed!") else: logger.error("Error when running decomposePar") sys.exit() else: logger.error("Error: Could not start lam-machine") sys.exit() else: Lam = None logger.info("Serial Run chosen!") if softStart != None: RASDict.replaceParameter("RASModel", "kEpsilon") controlDict.replaceParameter("stopAt", "nextWrite") controlDict.replaceParameter("writeInterval", "50") logger.info("...Softstarting using " + softStart) windFoamSolver = ConvergenceRunner( StandardLogAnalyzer(), argv=[softStart, "-case", casePath], silent=True, lam=Lam, logname=softStart) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() RASDict.replaceParameter("RASModel", "kEpsilon_canopy") controlDict.replaceParameter("writeInterval", str(iterations)) # The folowing line is to copy the landuse and LAD-files after the first iterations with simpleFoam ch.execute( "for file in " + os.path.join(casePath, "processor*/0/[Ll]*") + r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done' ) logger.info("...Running solver for wind field") windFoamSolver = ConvergenceRunner( StandardLogAnalyzer(), argv=[solver, "-case", casePath], silent=True, lam=Lam, logname=solver) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() if nprocesses 
> 1: logger.info("Reconstructing decomposed case...") reconstructCmd = "reconstructPar" reconstructUtil = UtilityRunner( argv=[reconstructCmd, "-latestTime", "-case", casePath], silent=True, logname="reconstrucPar") reconstructUtil.start() if reconstructUtil.runOK(): logger.info("recunstruction ready!") else: logger.error("Error while running recontructPar") sys.exit() logger.info("Removing decomposed mesh") ch.execute("rm -r " + os.path.join(casePath, "processor*")) logger.info("Removed decomposed mesh!") convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Ux", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Uy", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_k", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "U", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "k", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "p", casesRun + 1) logger.info("Archiving results from directory: %s" % ch.latestDir()) #save latest concentration result files solFiles = [ file for file in os.listdir(ch.latestDir()) if file in filesToArchive ] for filename in solFiles: dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) flowArchive.addFile(path.join(ch.latestDir(), filename), dirName=dirName) if archiveVTK: #Creating a temporary last time directory to be used by foamToVTK VTKTime = str(eval(path.basename(ch.latestDir())) + 1) newTimeDir = path.join(casePath, VTKTime) os.mkdir(newTimeDir) for filename in solFiles: oldFile = path.join(casePath, str(eval(ch.getLast()) - 1), filename) ch.execute("cp " + oldFile + " " + newTimeDir + "/") foamToVTKUtil = UtilityRunner( argv=["foamToVTK", "-case", 
casePath, "-time " + VTKTime], silent=True, logname="foamToVTK") foamToVTKUtil.start() if foamToVTKUtil.runOK(): ch.execute("mv " + path.join(casePath, "VTK") + " " + path.join( VTKArchiveDir, "VTK" + "_wspeed_" + str(wspeed) + "_wdir_" + str(wdir))) ch.execute("rm -r " + path.join(casePath, VTKTime)) logger.info("Exported to VTK archive!") else: logger.error("Error when exporting to VTK") sys.exit() logger.info("Finished wdir: " + str(wdir) + " wspeed: " + str(wspeed) + "Last iter = " + ch.getLast()) logger.info(" ") casesRun += 1 casesLeft -= 1 timeCase = time.time() - timeInit timeSpent += timeCase timeLeft = casesLeft * timeCase timeEstimated = time.localtime(time.time() + timeLeft) ch.clearResults() logger.info("Cleared all result directories exept: %s" % (" ".join(ch.getTimes()))) ch.restoreInitialFields() logger.info("Restored initital fields from backup copy") #restoring windData dictionary to original state windDict.purgeFile() convTable.writeProbes() convTable.writeResiduals() logger.info( "Residuals and probes from solver windFoam written to case/convergence directory" ) #Restoring controlDict to original state controlDict.purgeFile() logger.info("Finished batch calculation!")
def main(): parser = OptionParser(usage=usage, version=version) parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors") parser.add_option("-t", "--template", action="store", dest="controlfile", default=None, help="Generate default controlfile") parser.add_option("-l", "--logfile", action="store", dest="logfile", default=None, help="Writes output to logfile") parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Writes output to logfile") parser.add_option("-c", "--case", action="store", dest="case", default=None, help="Specifies case directory") (options, args) = parser.parse_args() rootLogger = logging.getLogger('') logger = logging.getLogger('windRunner') reportLevel = logging.INFO if options.quiet: reportLevel = logging.WARNING if options.debug: reportLevel = logging.DEBUG rootLogger.setLevel(reportLevel) if options.logfile is None: console = logging.StreamHandler() console.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) rootLogger.addHandler(console) if options.controlfile is not None: generateCf(options.controlfile, defaultCf) print "Wrote default controlfile" sys.exit(0) if options.logfile is not None: logFileName = path.abspath(options.logfile) if not path.exists(path.dirname(logFileName)): print "Bad argument, directory for logfile does not exist" sys.exit(1) logfile = logging.FileHandler(logFileName, "w") logfile.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') logfile.setFormatter(formatter) rootLogger.addHandler(logfile) if len(args) != 1: parser.error("Incorrect number of arguments") cf = ControlFile.ControlFile(fileName=path.abspath(args[0])) if options.case is not None: casePath = path.abspath(options.case) else: casePath = os.getcwd() caseName = path.basename(casePath) ch = CaseHandler.CaseHandler(casePath) wspeeds = 
cf.findScalarList("wspeeds:", optional=False) wdirs = cf.findScalarList("wdirs:", optional=False) iterations = cf.findScalar("iterations:", optional=False) inlet_z0 = cf.findScalarList("z0:", optional=False) z0Dict = {} for i, wdir in enumerate(wdirs): z0Dict[wdir] = inlet_z0[i] fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False) archiveDirName = cf.findString("flowArchiveDirName:", optional=False) restoreArchived = cf.findBoolean("restoreArchived:", optional=True, default=False) nodes = int(cf.findScalar("nodes:", optional=False)) CPUs = cf.findScalar("CPUs:", optional=True) if CPUs is None: nprocesses = 16 * nodes else: nprocesses = int(CPUs) # ----------------------------------- solver = cf.findString("solver:", default="windFoam") initCmds = cf.findStringList("initialize:", default=["setLanduse"]) flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName) nwdir = len(wdirs) convTable = ConvergenceTable.ConvergenceTable(casePath) logger.info("Running windRunner.py") logger.info("Setup overview:") logger.info(25 * "-") logger.info("Case: " + caseName) logger.info(25 * "-") logger.info("Wind directions are: " + str(wdirs)) logger.info("Wind speeds are: " + str(wspeeds)) nruns = nwdir * len(wspeeds) logger.info("Total number of runs: " + str(nruns)) logger.info(25 * "-") logger.info("Number of iterations are: " + str(iterations)) logger.info("Number of nodes are: " + str(nodes)) logger.info("Fields to be archived: " + str(fieldsToArchive)) logger.info(50 * "=") controlDict = ParameterFile(ch.controlDict()) # uses include file from 0/include ABLConditions = ParameterFile( path.join(ch.name, '0', 'include', 'ABLConditions')) compression = controlDict.readParameter("writeCompression") if compression == "compressed" or compression == "on": filesToArchive = [field + ".gz" for field in fieldsToArchive] else: filesToArchive = fieldsToArchive # booting lammachine for parallell execution if nprocesses > 1: Lam = LAMMachine(nr=nprocesses) 
Lam.writeScotch(ch) controlDict.replaceParameter("stopAt", "nextWrite") timeLeft = iterations * nruns * 20 timeSpent = 05 timeCase = iterations * 20 timeEstimated = time.localtime(time.time() + timeLeft) casesRun = 0 casesLeft = nruns ch.backUpInitialFields() logger.info("Backup made of initial fields") for wspeed in wspeeds: for wdir in wdirs: timeInit = time.time() controlDict.replaceParameter("writeInterval", str(iterations)) logger.info("Running calculations for dir: " + str(wdir) + " speed: " + str(wspeed)) logger.info("Time left: " + str(timeLeft / 60.0) + "min, Time spent: " + str(timeSpent / 60.0) + "min") logger.info("Estimated time for finish: " + str(timeEstimated[:4])) logger.info("Cases finished: " + str(casesRun) + " cases left: " + str(casesLeft)) logger.info(" ") ch.clearResults() dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) logger.info("restoreArchived = " + str(restoreArchived)) if flowArchive.inArchive(dirName=dirName) and not restoreArchived: logger.info('Results already in archive, moving on...') casesRun += 1 casesLeft -= 1 timeCase = time.time() - timeInit timeSpent += timeCase timeLeft = casesLeft * timeCase timeEstimated = time.localtime(time.time() + timeLeft) continue logger.info("...Modifying bc:s") ch.modWindDir(ch.initialDir(), wdir) logger.info("bc:s modified!") ABLConditions.replaceParameter("Uref", "%f" % wspeed) ABLConditions.replaceParameter("flowDir", dir2vec(wdir)) ABLConditions.replaceParameter("z0", 'uniform %f' % z0Dict[wdir]) for initCmd in initCmds: initUtil = UtilityRunner(argv=[initCmd, "-case", casePath], silent=True, logname=initCmd) initUtil.start() if initUtil.runOK(): logger.info("Successfully finished: %s" % initCmd) else: logger.error("Error when running: %s" % initCmd) sys.exit(1) if restoreArchived and \ flowArchive.inArchive(dirName=dirName): logger.info("Restoring archived flow fields") flowArchive.restore(dirName, ch.initialDir(), fieldsToArchive) for filename in fieldsToArchive: 
flowArchive.getFile(outputFile=path.join( ch.initialDir(), filename), fileName=filename, archiveDirName=dirName) logger.info("Restored archived flow fields!") if nprocesses > 1: if Lam.machineOK(): decomposeCmd = "decomposePar" decomposeUtil = UtilityRunner( argv=[decomposeCmd, "-case", casePath], silent=True, logname="decomposePar") logger.info("...Decomposing case to run on" + str(Lam.cpuNr()) + str(" of processors")) decomposeUtil.start() if decomposeUtil.runOK(): logger.info("Case decomposed!") else: logger.error("Error when running decomposePar") sys.exit() else: logger.error("Error: Could not start lam-machine") sys.exit() else: Lam = None logger.info("Serial Run chosen!") logger.info("...Running solver for wind field") windFoamSolver = ConvergenceRunner( StandardLogAnalyzer(), argv=[solver, "-case", casePath], silent=True, lam=Lam, logname=solver) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for solver") else: logger.error("Error while running solver") sys.exit() if nprocesses > 1: logger.info("Reconstructing decomposed case...") reconstructCmd = "reconstructPar" reconstructUtil = UtilityRunner( argv=[reconstructCmd, "-latestTime", "-case", casePath], silent=True, logname="reconstrucPar") reconstructUtil.start() if reconstructUtil.runOK(): logger.info("recunstruction ready!") else: logger.error("Error while running recontructPar") sys.exit(1) logger.info("Removing decomposed mesh") ch.execute("rm -r " + os.path.join(casePath, "processor*")) logger.info("Removed decomposed mesh!") convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Ux", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Uy", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_k", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + 
str(wspeed), "U", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "k", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "p", casesRun + 1) logger.info("Archiving results from directory: %s" % ch.latestDir()) # save latest concentration result files solFiles = [ file for file in os.listdir(ch.latestDir()) if file in filesToArchive ] for filename in solFiles: dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) flowArchive.addFile(path.join(ch.latestDir(), filename), dirName=dirName) logger.info("Finished wdir: " + str(wdir) + " wspeed: " + str(wspeed) + "Last iter = " + ch.getLast()) logger.info(" ") casesRun += 1 casesLeft -= 1 timeCase = time.time() - timeInit timeSpent += timeCase timeLeft = casesLeft * timeCase timeEstimated = time.localtime(time.time() + timeLeft) ch.clearResults() logger.info("Cleared all result directories exept: %s" % (" ".join(ch.getTimes()))) ch.restoreInitialFields() logger.info("Restored initital fields from backup copy") # restoring windData dictionary to original state ABLConditions.purgeFile() convTable.writeProbes() convTable.writeResiduals() logger.info("Residuals and probes from solver " + "written to case/convergence directory") # Restoring controlDict to original state controlDict.purgeFile() logger.info("Finished batch calculation!")
f.write(str(gen)) f.close() self.setLogname(default="Decomposer", useApplication=False) run = UtilityRunner( argv=argv, silent=self.opts.progress or self.opts.silent, logname=self.opts.logname, compressLog=self.opts.compress, server=self.opts.server, noLog=self.opts.noLog, logTail=self.opts.logTail, echoCommandLine=self.opts.echoCommandPrefix, jobId=self.opts.jobId) run.start() if theRegion != None and not decomposeParWithRegion: print_("Syncing into master case") regions.resync(theRegion) if regions != None and not decomposeParWithRegion: if not self.opts.keeppseudo: print_("Removing pseudo-regions") regions.cleanAll() else: for r in sol.getRegions(): if r not in regionNames: regions.clean(r) if self.opts.doConstantLinks:
# Parameter sweep: ramp the inlet velocity from 0 up to `maximum` in nr
# equal steps; for each step run the solver, extract the pressure drop and
# the outlet mass flux from utility output, archive the last time step and
# clear the case for the next step.
# NOTE(review): relies on names defined elsewhere in the file (nr, maximum,
# sol, solver, case, dire, pCmd, mCmd).
for i in range(nr+1):
    # Set the boundary condition at the inlet
    # NOTE(review): if maximum and nr are both ints this is integer
    # division under Python 2 -- confirm intended
    val=(maximum*i)/nr
    print "Inlet velocity:",val
    sol.replaceBoundary("inlet","(%f 0 0)" %(val))
    # Run the solver
    run=ConvergenceRunner(BoundingLogAnalyzer(),argv=[solver,"-case",case],silent=True)
    run.start()
    print "Last Time = ",dire.getLast()
    # Get the pressure difference (Using an external utility)
    pUtil=UtilityRunner(argv=[pCmd,"-case",case],silent=True,logname="Pressure")
    pUtil.add("deltaP","Pressure at .* Difference .*\] (.+)")
    pUtil.start()
    # first captured match of the deltaP regexp
    deltaP=pUtil.get("deltaP")[0]
    # Get the mass flow
    mUtil=UtilityRunner(argv=[mCmd,"-case",case,"-latestTime"],silent=True,logname="MassFlow")
    mUtil.add("mass","Flux at (.+?) .*\] (.+)",idNr=1)
    mUtil.start()
    # flux value keyed by patch name "outlet" (idNr=1 selects group 1 as ID)
    massFlow=mUtil.get("mass",ID="outlet")[0]
    # NOTE(review): deltaP and massFlow are not used further within this
    # fragment -- presumably consumed by code outside this chunk
    # Archive the results
    dire.lastToArchive("vel=%g" % (val))
    # Clear results
    dire.clearResults()
sol.replaceBoundary("inlet", "(%f 0 0)" % (val)) # Run the solver run = ConvergenceRunner(BoundingLogAnalyzer(), argv=[solver, "-case", case], silent=True) run.start() print "Last Time = ", dire.getLast() # Get the pressure difference (Using an external utility) pUtil = UtilityRunner(argv=[pCmd, "-case", case], silent=True, logname="Pressure") pUtil.add("deltaP", "Pressure at .* Difference .*\] (.+)") pUtil.start() deltaP = pUtil.get("deltaP")[0] # Get the mass flow mUtil = UtilityRunner(argv=[mCmd, "-case", case, "-latestTime"], silent=True, logname="MassFlow") mUtil.add("mass", "Flux at (.+?) .*\] (.+)", idNr=1) mUtil.start() massFlow = mUtil.get("mass", ID="outlet")[0] # Archive the results dire.lastToArchive("vel=%g" % (val))
def run(self):
    """Build a decomposeParDict from the command-line options, then (unless
    in test mode) decompose the case with the configured decomposer
    utility, handling multi-region cases, optional clearing of old
    processor directories and optional symlinking of constant/ files into
    the processor directories.
    """
    # decomposePar understands -region natively from OpenFOAM 1.6 on
    decomposeParWithRegion=(foamVersion()>=(1,6))
    if self.opts.keeppseudo and (not self.opts.regions and self.opts.region==None):
        warning("Option --keep-pseudocases only makes sense for multi-region-cases")
    if decomposeParWithRegion and self.opts.keeppseudo:
        warning("Option --keep-pseudocases doesn't make sense since OpenFOAM 1.6 because decomposePar supports regions")
    nr=int(self.parser.getArgs()[1])
    if nr<2:
        error("Number of processors",nr,"too small (at least 2)")
    case=path.abspath(self.parser.getArgs()[0])
    method=self.opts.method

    # ---- assemble the decomposeParDict contents ------------------------
    result={}
    result["numberOfSubdomains"]=nr
    result["method"]=method
    coeff={}
    result[method+"Coeffs"]=coeff

    if self.opts.globalFaceZones!=None:
        # accept either a Python literal or a Foam-style string
        try:
            fZones=eval(self.opts.globalFaceZones)
        except SyntaxError:
            fZones=FoamStringParser(
                self.opts.globalFaceZones,
                listDict=True
            ).data
        result["globalFaceZones"]=fZones

    # method-specific coefficient validation
    if method in ["metis","scotch","parMetis"]:
        if self.opts.processorWeights!=None:
            weigh=eval(self.opts.processorWeights)
            if nr!=len(weigh):
                error("Number of processors",nr,"and length of",weigh,"differ")
            coeff["processorWeights"]=weigh
    elif method=="manual":
        if self.opts.dataFile==None:
            error("Missing required option dataFile")
        else:
            coeff["dataFile"]="\""+self.opts.dataFile+"\""
    elif method=="simple" or method=="hierarchical":
        if self.opts.n==None or self.opts.delta==None:
            error("Missing required option n or delta")
        n=eval(self.opts.n)
        if len(n)!=3:
            error("Needs to be three elements, not",n)
        if nr!=n[0]*n[1]*n[2]:
            error("Subdomains",n,"inconsistent with processor number",nr)
        coeff["n"]="(%d %d %d)" % (n[0],n[1],n[2])
        coeff["delta"]=float(self.opts.delta)
        if method=="hierarchical":
            if self.opts.order==None:
                error("Missing reuired option order")
            if len(self.opts.order)!=3:
                error("Order needs to be three characters")
            coeff["order"]=self.opts.order
    else:
        error("Method",method,"not yet implementes")

    gen=FoamFileGenerator(result)
    if self.opts.test:
        # test mode: only print the generated dictionary, don't touch disk
        print_(str(gen))
        return -1
    else:
        f=open(path.join(case,"system","decomposeParDict"),"w")
        writeDictionaryHeader(f)
        f.write(str(gen))
        f.close()

    if self.opts.clear:
        # remove leftover processor directories from earlier decompositions
        print_("Clearing processors")
        for p in glob(path.join(case,"processor*")):
            print_("Removing",p)
            rmtree(p,ignore_errors=True)

    self.checkAndCommit(SolutionDirectory(case,archive=None))

    if self.opts.doDecompose:
        if self.opts.region:
            # "region0" stands for the default (unnamed) region
            regionNames=self.opts.region[:]
            while True:
                try:
                    i=regionNames.index("region0")
                    regionNames[i]=None
                except ValueError:
                    break
        else:
            regionNames=[None]

        regions=None
        sol=SolutionDirectory(case)
        if not decomposeParWithRegion:
            # pre-1.6: regions are decomposed via separate pseudo-cases
            if self.opts.regions or self.opts.region!=None:
                print_("Building Pseudocases")
                regions=RegionCases(sol,clean=True,processorDirs=False)
            if self.opts.regions:
                regionNames=sol.getRegions(defaultRegion=True)

        for theRegion in regionNames:
            theCase=path.normpath(case)
            if theRegion!=None and not decomposeParWithRegion:
                # pseudo-case directory "<case>.<region>"
                theCase+="."+theRegion

            if oldApp():
                argv=[self.opts.decomposer,".",theCase]
            else:
                argv=[self.opts.decomposer,"-case",theCase]
            if foamVersion()>=(2,0) and not self.opts.doFunctionObjects:
                argv+=["-noFunctionObjects"]
            if theRegion!=None and decomposeParWithRegion:
                argv+=["-region",theRegion]
                # region decomposition reads system/<region>/decomposeParDict
                f=open(path.join(case,"system",theRegion,"decomposeParDict"),"w")
                writeDictionaryHeader(f)
                f.write(str(gen))
                f.close()

            self.setLogname(default="Decomposer",useApplication=False)
            run=UtilityRunner(argv=argv,
                              silent=self.opts.progress or self.opts.silent,
                              logname=self.opts.logname,
                              compressLog=self.opts.compress,
                              server=self.opts.server,
                              noLog=self.opts.noLog,
                              logTail=self.opts.logTail,
                              echoCommandLine=self.opts.echoCommandPrefix,
                              jobId=self.opts.jobId)
            run.start()

            if theRegion!=None and not decomposeParWithRegion:
                # copy the pseudo-case's processor dirs back into the master
                print_("Syncing into master case")
                regions.resync(theRegion)

        if regions!=None and not decomposeParWithRegion:
            if not self.opts.keeppseudo:
                print_("Removing pseudo-regions")
                regions.cleanAll()
            else:
                # keep requested pseudo-cases, drop the rest
                for r in sol.getRegions():
                    if r not in regionNames:
                        regions.clean(r)

        if self.opts.doConstantLinks:
            print_("Adding symlinks in the constant directories")
            constPath=path.join(case,"constant")
            for f in listdir(constPath):
                # relative link ../../constant/<f> from each processor dir
                srcExpr=path.join(path.pardir,path.pardir,"constant",f)
                for p in range(nr):
                    dest=path.join(case,"processor%d"%p,"constant",f)
                    if not path.exists(dest):
                        symlink(srcExpr,dest)

    self.addToCaseLog(case)
def main():
    """Batch driver: run canopy wind-field calculations for every
    combination of wind speed and wind direction listed in a controlfile.

    Workflow per case: clear old results, set inlet/boundary conditions
    (or restore previously archived fields), optionally decompose for
    parallel execution via LAM, optionally soft-start with a plain
    kEpsilon model, run the wind solver, reconstruct, record
    residuals/probes in a convergence table, archive the requested
    fields (optionally also as VTK), then restore the case to its
    initial state for the next combination.

    NOTE(review): this is Python 2 code (print statements, leading-zero
    integer literal). The original indentation was reconstructed from a
    collapsed source; the nesting of a few branches is hedged inline and
    should be confirmed against the original file.
    """
    # --- command-line options -----------------------------------------
    parser=OptionParser(usage= usage, version=version)
    parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors")
    parser.add_option("-t", "--template", action="store",dest="controlfile",default=None, help="Generate default controlfile")
    parser.add_option("-l", "--logfile", action="store",dest="logfile",default=None, help="Writes output to logfile")
    # NOTE(review): help string below looks copy-pasted from -l; -d
    # actually switches on debug-level logging (see reportLevel below)
    parser.add_option("-d", "--debug", action="store_true",dest="debug",default=False, help="Writes output to logfile")
    parser.add_option("-c", "--case", action="store",dest="case",default=None, help="Specifies case directory")
    (options, args) = parser.parse_args()

    # --- logging setup: handlers are attached to the root logger ------
    rootLogger=logging.getLogger('')
    logger=logging.getLogger('canopyRunner')
    reportLevel=logging.INFO
    if options.quiet:
        reportLevel=logging.WARNING
    if options.debug:
        reportLevel=logging.DEBUG
    rootLogger.setLevel(reportLevel)
    if options.logfile==None:
        # no logfile requested -> log to the console instead
        console=logging.StreamHandler()
        console.setLevel(reportLevel)
        formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        rootLogger.addHandler(console)

    # template mode: write a default controlfile and exit immediately
    if options.controlfile!=None:
        generateCf(path.abspath(options.controlfile))
        print "Wrote default controlfile"
        sys.exit()

    # logfile mode: replace console output with a file handler
    if options.logfile!=None:
        logFileName=path.abspath(options.logfile)
        if not path.exists(path.dirname(logFileName)):
            print "Bad argument, directory for logfile does not exist"
            sys.exit()
        logfile=logging.FileHandler(logFileName,"w")
        logfile.setLevel(reportLevel)
        formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
        logfile.setFormatter(formatter)
        rootLogger.addHandler(logfile)

    # --- read the controlfile (the single positional argument) --------
    if len(args)!=1:
        parser.error("Incorrect number of arguments")
    cf=ControlFile.ControlFile(fileName=path.abspath(args[0]))
    if options.case!=None:
        casePath=path.abspath(options.case)
    else:
        casePath=os.getcwd()
    caseName=path.basename(casePath)
    ch=CaseHandler.CaseHandler(casePath)

    # run matrix and archiving parameters from the controlfile
    wspeeds=cf.findScalarList("wspeeds:",optional=False)
    wdirs=cf.findScalarList("wdirs:",optional=False)
    iterations=cf.findScalar("iterations:",optional=False)
    # z0: roughness length used for the inlet wind profile
    inletProfile_z0=cf.findScalar("z0:",optional=False)
    fieldsToArchive=cf.findStringList("fieldsToArchive:",optional=False)
    archiveDirName=cf.findString("flowArchiveDirName:",optional=False)
    restoreArchived=cf.findBoolean("restoreArchived:",optional=True,default=False)
    archiveVTK=cf.findBoolean("archiveVTK:",optional=False)
    VTKArchiveDir=cf.findExistingPath("VTKArchiveDir:",optional=False)
    nodes=int(cf.findScalar("nodes:",optional=False))
    CPUs=cf.findScalar("CPUs:",optional=True)
    if CPUs==None:
        # default assumption: 8 cores per node
        nprocesses=8*nodes
    else:
        nprocesses=int(CPUs)
    #-----------------------------------
    solver=cf.findString("solver:",default="windFoam")
    softStart=cf.findString("softstart_application:",optional=True)
    initCmds=cf.findStringList("initialize:",default=["setWindInlet"])
    flowArchive=FoamArchive.FoamArchive(casePath,archiveDirName)
    nwdir= len(wdirs)
    convTable=ConvergenceTable.ConvergenceTable(casePath)

    # --- report the batch setup ---------------------------------------
    logger.info("Running windRunner.py")
    logger.info("Setup overview:")
    logger.info(25*"-")
    logger.info("Case: "+ caseName)
    logger.info(25*"-")
    logger.info("Wind directions are: "+ str(wdirs))
    logger.info("Wind speeds are: "+str(wspeeds))
    nruns=nwdir*len(wspeeds)
    logger.info("Total number of runs: "+str(nruns))
    logger.info(25*"-")
    logger.info("Number of iterations are: "+str(iterations))
    logger.info("Number of nodes are: "+str(nodes))
    logger.info("Fields to be archived: "+str(fieldsToArchive))
    logger.info("ArchiveToVTK is set to: "+str(archiveVTK))
    logger.info(50*"=")

    # --- case dictionaries --------------------------------------------
    controlDict=ParameterFile(ch.controlDict())
    windDict=ParameterFile(path.join(ch.constantDir(),"windDict"))
    RASDict=ParameterFile(path.join(ch.constantDir(),"RASProperties"))
    compression=controlDict.readParameter("writeCompression")
    if compression=="compressed" or compression=="on":
        # results are written gzipped, so archive the .gz variants
        filesToArchive=[field+".gz" for field in fieldsToArchive]
    else:
        filesToArchive=fieldsToArchive
    if not path.exists(VTKArchiveDir) and archiveVTK:
        logger.error("The VTKArchiveDir does not exist")
        sys.exit()

    # Boot the LAM machine for parallel execution
    if nprocesses>1:
        Lam=LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")

    # crude wall-clock bookkeeping (~20 s per iteration as first guess)
    timeLeft=iterations*nruns*20
    timeSpent=05  # NOTE(review): leading zero is an octal literal in Python 2 (value 5)
    timeCase=iterations*20
    timeEstimated=time.localtime(time.time()+timeLeft)
    casesRun=0
    casesLeft=nruns
    ch.backUpInitialFields()
    logger.info("Backup made of initial fields")

    # === main batch loop: one solver run per (speed, direction) pair ===
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit=time.time()
            controlDict.replaceParameter("writeInterval",str(iterations))
            logger.info("Running calculations for dir: "+ str(wdir)+ " speed: "+ str(wspeed))
            logger.info("Time left: "+str(timeLeft/60.0)+"min, Time spent: "+str(timeSpent/60.0)+"min")
            logger.info("Estimated time for finish: "+str(timeEstimated[:4]))
            logger.info("Cases finished: "+str(casesRun)+" cases left: "+str(casesLeft))
            logger.info(" ")
            ch.clearResults()
            dirName="wspeed_"+str(wspeed)+"_wdir_"+str(wdir)
            logger.info("restoreArchived = "+str(restoreArchived))
            if restoreArchived and flowArchive.inArchive(dirName=dirName):
                # reuse previously archived fields as the starting point
                logger.info("Restoring archived flow fields")
                flowArchive.restore(dirName,fieldsToArchive,ch.initialDir())
                for filename in fieldsToArchive:
                    flowArchive.getFile(outputFile=path.join(ch.initialDir(),filename),fileName=filename,archiveDirName=dirName)
                logger.info("Restored archived flow fields!")
            else:
                # set up boundary conditions and inlet profile from scratch
                logger.info("...Modifying bc:s")
                ch.modWindDir(ch.initialDir(),wdir)
                logger.info("bc:s modified!")
                logger.info("...Setting inlet profiles")
                windDict.replaceParameter("U10",str(wspeed))
                windDict.replaceParameter("windDirection",str(wdir))
                windDict.replaceParameter("z0",str(inletProfile_z0))
                # run the initialisation utilities (default: setWindInlet)
                for initCmd in initCmds:
                    initUtil=UtilityRunner(argv=[initCmd,"-case",casePath],silent=True,logname=initCmd)
                    initUtil.start()
                    if initUtil.runOK():
                        logger.info("Successfully finished: %s" %initCmd)
                    else:
                        logger.error("Error when running: %s" %initCmd)
                        sys.exit()
            # NOTE(review): indentation reconstructed -- decompose/solve is
            # assumed to run for both the restored and the freshly
            # initialised branch; confirm against the original file
            if nprocesses>1:
                if Lam.machineOK():
                    decomposeCmd="decomposePar"
                    decomposeUtil=UtilityRunner(argv=[decomposeCmd,"-case",casePath],silent=True,logname="decomposePar")
                    logger.info("...Decomposing case to run on"+str(Lam.cpuNr())+str(" of processors"))
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        logger.info("Case decomposed!")
                    else:
                        logger.error("Error when running decomposePar")
                        sys.exit()
                else:
                    logger.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                # serial run: no LAM machine handed to the runners
                Lam=None
                logger.info("Serial Run chosen!")
            # optional soft start: stabilise with plain kEpsilon first
            if softStart != None:
                RASDict.replaceParameter("RASModel","kEpsilon")
                controlDict.replaceParameter("stopAt","nextWrite")
                controlDict.replaceParameter("writeInterval","50")
                logger.info("...Softstarting using "+softStart)
                windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),argv=[softStart,"-case",casePath],silent=True,lam=Lam,logname=softStart)
                windFoamSolver.start()
                if windFoamSolver.runOK():
                    logger.info("Iterations finished for windFoam")
                else:
                    logger.error("Error while running windFoam")
                    sys.exit()
                # switch to the canopy model for the production run
                RASDict.replaceParameter("RASModel","kEpsilon_canopy")
                controlDict.replaceParameter("writeInterval",str(iterations))
                # The following shell loop copies the landuse and LAD files
                # into every processor time directory after the first
                # iterations with simpleFoam
                ch.execute("for file in "+os.path.join(casePath,"processor*/0/[Ll]*") + r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done')
            # production solver run for the wind field
            logger.info("...Running solver for wind field")
            windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),argv=[solver,"-case",casePath],silent=True,lam=Lam,logname=solver)
            windFoamSolver.start()
            if windFoamSolver.runOK():
                logger.info("Iterations finished for windFoam")
            else:
                logger.error("Error while running windFoam")
                sys.exit()
            # parallel runs: reconstruct the latest time and drop the
            # processor directories again
            if nprocesses>1:
                logger.info("Reconstructing decomposed case...")
                reconstructCmd="reconstructPar"
                reconstructUtil=UtilityRunner(argv=[reconstructCmd,"-latestTime","-case",casePath],silent=True,logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    logger.info("recunstruction ready!")
                else:
                    logger.error("Error while running recontructPar")
                    sys.exit()
                logger.info("Removing decomposed mesh")
                ch.execute("rm -r "+os.path.join(casePath,"processor*"))
                logger.info("Removed decomposed mesh!")
            # record convergence history (residuals and probes) per case
            convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_Ux",casesRun+1)
            convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_Uy",casesRun+1)
            convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_k",casesRun+1)
            convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_epsilon",casesRun+1)
            convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"U",casesRun+1)
            convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"k",casesRun+1)
            convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"epsilon",casesRun+1)
            convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"p",casesRun+1)
            logger.info("Archiving results from directory: %s" %ch.latestDir())
            #save latest concentration result files
            solFiles=[file for file in os.listdir(ch.latestDir()) if file in filesToArchive]
            for filename in solFiles:
                dirName= "wspeed_"+str(wspeed)+"_wdir_"+str(wdir)
                flowArchive.addFile(path.join(ch.latestDir(),filename),dirName=dirName)
            if archiveVTK:
                #Creating a temporary last time directory to be used by foamToVTK
                # NOTE(review): eval() on directory names works for numeric
                # time names but is fragile and unsafe; int()/float() would
                # be preferable
                VTKTime=str(eval(path.basename(ch.latestDir()))+1)
                newTimeDir=path.join(casePath,VTKTime)
                os.mkdir(newTimeDir)
                for filename in solFiles:
                    # presumably picks the last written time; confirm the
                    # getLast()-1 offset against the original intent
                    oldFile=path.join(casePath,str(eval(ch.getLast())-1),filename)
                    ch.execute("cp "+oldFile+" "+newTimeDir+"/")
                foamToVTKUtil=UtilityRunner(argv=["foamToVTK","-case",casePath,"-time "+VTKTime],silent=True,logname="foamToVTK")
                foamToVTKUtil.start()
                if foamToVTKUtil.runOK():
                    ch.execute("mv "+path.join(casePath,"VTK")+" "+path.join(VTKArchiveDir,"VTK"+"_wspeed_"+str(wspeed)+"_wdir_"+str(wdir)))
                    ch.execute("rm -r "+path.join(casePath,VTKTime) )
                    logger.info("Exported to VTK archive!")
                else:
                    logger.error("Error when exporting to VTK")
                    sys.exit()
            logger.info("Finished wdir: "+ str(wdir)+ " wspeed: "+ str(wspeed)+ "Last iter = "+ch.getLast())
            logger.info(" ")
            # update bookkeeping and reset the case for the next run
            casesRun+=1
            casesLeft-=1
            timeCase=time.time()-timeInit
            timeSpent+=timeCase
            timeLeft=casesLeft*timeCase
            timeEstimated=time.localtime(time.time()+timeLeft)
            ch.clearResults()
            logger.info("Cleared all result directories exept: %s" %(" ".join(ch.getTimes() ) ))
            ch.restoreInitialFields()
            logger.info("Restored initital fields from backup copy")
            #restoring windData dictionary to original state
            windDict.purgeFile()
    # --- after all runs: write convergence data, restore controlDict ---
    convTable.writeProbes()
    convTable.writeResiduals()
    logger.info("Residuals and probes from solver windFoam written to case/convergence directory")
    #Restoring controlDict to original state
    controlDict.purgeFile()
    logger.info("Finished batch calculation!")