def execute(self, para, log):
    """Run the configured utility on a case and harvest data via self.regexp.

    Parameters:
        para -- mapping with a 'case' entry naming the case directory
        log  -- unused here; kept for interface compatibility with callers

    Returns:
        (ok, result) where ok is the runner's success flag and result is a
        list of the extracted data items, or None when no data was produced.
    """
    argv = [self.utility, ".", para['case']] + self.options.split()
    print_(" Executing and analyzing", " ".join(argv), end=" ")
    sys.stdout.flush()
    run = UtilityRunner(argv, silent=True, lam=Command.parallel,
                        logname="_".join(argv))
    run.add("data", self.regexp)
    run.start()
    data = run.analyzer.getData("data")
    # fix: compare with 'is None' (PEP 8) and copy via list() instead of a
    # manual append loop; empty-but-present data still yields [] as before
    result = list(data) if data is not None else None
    if result is None:
        print_("no data", end=" ")
    else:
        print_(result, end=" ")
    ok = run.runOK()  # hoisted: query the runner's status once, not thrice
    if ok:
        print_()
    else:
        print_("---> there was a problem")
    return ok, result
def execute(self, para, log):
    """Run the configured utility on a case and harvest data via self.regexp.

    Parameters:
        para -- mapping with a 'case' entry naming the case directory
        log  -- unused here; kept for interface compatibility with callers

    Returns:
        (ok, result) where ok is the runner's success flag and result is a
        list of the extracted data items, or None when no data was produced.
    """
    argv = [self.utility, ".", para['case']] + self.options.split()
    print_(" Executing and analyzing", " ".join(argv), end=" ")
    sys.stdout.flush()
    run = UtilityRunner(argv, silent=True, lam=Command.parallel,
                        logname="_".join(argv))
    run.add("data", self.regexp)
    run.start()
    data = run.analyzer.getData("data")
    # fix: compare with 'is None' (PEP 8) and copy via list() instead of a
    # manual append loop; empty-but-present data still yields [] as before
    result = list(data) if data is not None else None
    if result is None:
        print_("no data", end=" ")
    else:
        print_(result, end=" ")
    ok = run.runOK()  # hoisted: query the runner's status once, not thrice
    if ok:
        print_()
    else:
        print_("---> there was a problem")
    return ok, result
def main():
    """Batch-run the wind-flow solver for every (wspeed, wdir) combination.

    Reads run parameters from a controlfile, modifies boundary conditions
    per run, optionally restores previously archived flow fields, runs the
    solver (in parallel via LAM when nprocesses > 1), archives result
    fields, and records residuals/probes in a convergence table.
    NOTE(review): Python 2 script (print statements, octal literal below).
    """
    # --- command-line parsing (optparse) ---
    parser = OptionParser(usage=usage, version=version)
    parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
                      default=False, help="Only print warnings and errors")
    parser.add_option("-t", "--template", action="store", dest="controlfile",
                      default=None, help="Generate default controlfile")
    parser.add_option("-l", "--logfile", action="store", dest="logfile",
                      default=None, help="Writes output to logfile")
    parser.add_option("-d", "--debug", action="store_true", dest="debug",
                      default=False, help="Writes output to logfile")
    parser.add_option("-c", "--case", action="store", dest="case",
                      default=None, help="Specifies case directory")
    (options, args) = parser.parse_args()

    # --- logging setup: console handler unless a logfile is requested ---
    rootLogger = logging.getLogger('')
    logger = logging.getLogger('windRunner')
    reportLevel = logging.INFO
    if options.quiet:
        reportLevel = logging.WARNING
    if options.debug:
        reportLevel = logging.DEBUG
    rootLogger.setLevel(reportLevel)
    if options.logfile is None:
        console = logging.StreamHandler()
        console.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        rootLogger.addHandler(console)

    # -t: emit a default controlfile template and quit
    if options.controlfile is not None:
        generateCf(options.controlfile, defaultCf)
        print "Wrote default controlfile"
        sys.exit(0)

    # -l: route log output to a file instead of the console
    if options.logfile is not None:
        logFileName = path.abspath(options.logfile)
        if not path.exists(path.dirname(logFileName)):
            print "Bad argument, directory for logfile does not exist"
            sys.exit(1)
        logfile = logging.FileHandler(logFileName, "w")
        logfile.setLevel(reportLevel)
        formatter = logging.Formatter(
            '%(name)-12s: %(levelname)-8s %(message)s')
        logfile.setFormatter(formatter)
        rootLogger.addHandler(logfile)

    # exactly one positional argument: the controlfile path
    if len(args) != 1:
        parser.error("Incorrect number of arguments")

    # --- read run configuration from the controlfile ---
    cf = ControlFile.ControlFile(fileName=path.abspath(args[0]))
    if options.case is not None:
        casePath = path.abspath(options.case)
    else:
        casePath = os.getcwd()
    caseName = path.basename(casePath)
    ch = CaseHandler.CaseHandler(casePath)
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    inlet_z0 = cf.findScalarList("z0:", optional=False)
    # map each wind direction to its inlet roughness length
    # (assumes the z0 list is parallel to wdirs — TODO confirm)
    z0Dict = {}
    for i, wdir in enumerate(wdirs):
        z0Dict[wdir] = inlet_z0[i]
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    archiveDirName = cf.findString("flowArchiveDirName:", optional=False)
    restoreArchived = cf.findBoolean("restoreArchived:",
                                     optional=True, default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        # presumably 16 cores per node on the target cluster — verify
        nprocesses = 16 * nodes
    else:
        nprocesses = int(CPUs)
    # -----------------------------------
    solver = cf.findString("solver:", default="windFoam")
    initCmds = cf.findStringList("initialize:", default=["setLanduse"])
    flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(casePath)

    # --- log a setup overview ---
    logger.info("Running windRunner.py")
    logger.info("Setup overview:")
    logger.info(25 * "-")
    logger.info("Case: " + caseName)
    logger.info(25 * "-")
    logger.info("Wind directions are: " + str(wdirs))
    logger.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    logger.info("Total number of runs: " + str(nruns))
    logger.info(25 * "-")
    logger.info("Number of iterations are: " + str(iterations))
    logger.info("Number of nodes are: " + str(nodes))
    logger.info("Fields to be archived: " + str(fieldsToArchive))
    logger.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    # uses include file from 0/include
    ABLConditions = ParameterFile(
        path.join(ch.name, '0', 'include', 'ABLConditions'))
    compression = controlDict.readParameter("writeCompression")
    if compression == "compressed" or compression == "on":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
    else:
        filesToArchive = fieldsToArchive

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")

    # --- crude wall-clock bookkeeping (20 s/iteration guess) ---
    timeLeft = iterations * nruns * 20
    # NOTE(review): '05' is an octal literal (value 5) — probably meant 0;
    # this is also invalid syntax under Python 3.
    timeSpent = 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    ch.backUpInitialFields()
    logger.info("Backup made of initial fields")

    # --- main batch loop over all wind-speed / wind-direction pairs ---
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            controlDict.replaceParameter("writeInterval", str(iterations))
            logger.info("Running calculations for dir: " + str(wdir) +
                        " speed: " + str(wspeed))
            logger.info("Time left: " + str(timeLeft / 60.0) +
                        "min, Time spent: " + str(timeSpent / 60.0) + "min")
            logger.info("Estimated time for finish: " + str(timeEstimated[:4]))
            logger.info("Cases finished: " + str(casesRun) +
                        " cases left: " + str(casesLeft))
            logger.info(" ")
            ch.clearResults()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            logger.info("restoreArchived = " + str(restoreArchived))
            # skip runs whose results are already archived
            if flowArchive.inArchive(dirName=dirName) and not restoreArchived:
                logger.info('Results already in archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue

            # adjust boundary conditions for this direction/speed
            logger.info("...Modifying bc:s")
            ch.modWindDir(ch.initialDir(), wdir)
            logger.info("bc:s modified!")
            ABLConditions.replaceParameter("Uref", "%f" % wspeed)
            ABLConditions.replaceParameter("flowDir", dir2vec(wdir))
            ABLConditions.replaceParameter("z0", 'uniform %f' % z0Dict[wdir])

            # run configured initialization utilities (e.g. setLanduse)
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", casePath],
                                         silent=True, logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    logger.info("Successfully finished: %s" % initCmd)
                else:
                    logger.error("Error when running: %s" % initCmd)
                    sys.exit(1)

            # optionally seed the run from previously archived fields
            if restoreArchived and \
                    flowArchive.inArchive(dirName=dirName):
                logger.info("Restoring archived flow fields")
                flowArchive.restore(dirName, ch.initialDir(), fieldsToArchive)
                for filename in fieldsToArchive:
                    flowArchive.getFile(
                        outputFile=path.join(ch.initialDir(), filename),
                        fileName=filename, archiveDirName=dirName)
                logger.info("Restored archived flow fields!")

            # decompose for parallel execution
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", casePath],
                        silent=True, logname="decomposePar")
                    logger.info("...Decomposing case to run on" +
                                str(Lam.cpuNr()) + str(" of processors"))
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        logger.info("Case decomposed!")
                    else:
                        logger.error("Error when running decomposePar")
                        sys.exit()
                else:
                    logger.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                logger.info("Serial Run chosen!")

            # run the wind-field solver until convergence / iteration limit
            logger.info("...Running solver for wind field")
            windFoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(), argv=[solver, "-case", casePath],
                silent=True, lam=Lam, logname=solver)
            windFoamSolver.start()
            if windFoamSolver.runOK():
                logger.info("Iterations finished for solver")
            else:
                logger.error("Error while running solver")
                sys.exit()

            # reconstruct and drop the decomposed mesh after a parallel run
            if nprocesses > 1:
                logger.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-latestTime", "-case", casePath],
                    silent=True, logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    logger.info("recunstruction ready!")
                else:
                    logger.error("Error while running recontructPar")
                    sys.exit(1)
                logger.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(casePath, "processor*"))
                logger.info("Removed decomposed mesh!")

            # record residuals and probes for this run in the table
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                  solver, "linear_Ux", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                  solver, "linear_Uy", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                  solver, "linear_k", casesRun + 1)
            convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                  solver, "linear_epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" +
                                str(wspeed), "U", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" +
                                str(wspeed), "k", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" +
                                str(wspeed), "epsilon", casesRun + 1)
            convTable.addProbes("wd_" + str(wdir) + "_ws_" +
                                str(wspeed), "p", casesRun + 1)

            logger.info("Archiving results from directory: %s"
                        % ch.latestDir())
            # save latest concentration result files
            solFiles = [
                file for file in os.listdir(ch.latestDir())
                if file in filesToArchive
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                flowArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
            logger.info("Finished wdir: " + str(wdir) + " wspeed: " +
                        str(wspeed) + "Last iter = " + ch.getLast())
            logger.info(" ")

            # per-run bookkeeping for the time estimate
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)

    # --- final cleanup: restore the case to its original state ---
    ch.clearResults()
    logger.info("Cleared all result directories exept: %s"
                % (" ".join(ch.getTimes())))
    ch.restoreInitialFields()
    logger.info("Restored initital fields from backup copy")
    # restoring windData dictionary to original state
    ABLConditions.purgeFile()
    convTable.writeProbes()
    convTable.writeResiduals()
    logger.info("Residuals and probes from solver " +
                "written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    logger.info("Finished batch calculation!")
def main():
    """Batch-run the species (concentration) solver for every
    (wspeed, wdir) pair, seeding each run from an archived flow field.

    Skips pairs already present in the concentration archive, warns and
    skips pairs with no archived flow field, and records residuals/probes
    per species file in a convergence table.
    NOTE(review): still Python 2 (octal literal '05' below).
    """
    # --- command-line parsing (argparse) ---
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-t", "--template", action="store", dest="template",
                        help="Generate default controlfile")
    parser.add_argument(
        '-v',
        action=VerboseAction, dest='loglevel', default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l', metavar='logfile',
        action=LogFileAction, dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c", "--case", action="store", dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(action="store", dest="controlfile",
                        help="Controlfile for speciesRunner")
    args = parser.parse_args()

    # -t: emit a default controlfile template and quit
    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)

    # --- read run configuration from the controlfile ---
    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString("flowArchiveDirName:",
                                       optional=True, default='flowArchive')
    concArchiveDirName = cf.findString("concArchiveDirName:",
                                       optional=True, default='concArchive')
    restoreArchived = cf.findBoolean("restoreArchived:",
                                     optional=True, default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        # derive process count from node count when CPUs is not given
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)
    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)

    # --- log a setup overview ---
    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict"))
    # account for .gz suffixes when the case writes compressed fields
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")

    # --- crude wall-clock bookkeeping (20 s/iteration guess) ---
    timeLeft = iterations * nruns * 20
    # NOTE(review): '05' is an octal literal (value 5) — probably meant 0;
    # this is also invalid syntax under Python 3.
    timeSpent = 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")

    # --- main batch loop over all wind-speed / wind-direction pairs ---
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # skip runs whose concentrations are already archived
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info(
                    'Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            # cannot run without an archived flow field to start from
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..."
                            % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue

            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info("Running calculations for dir: " + str(wdir) +
                     " speed: " + str(wspeed))
            log.info("Time left: " + str(timeLeft / 60.0) +
                     "min, Time spent: " + str(timeSpent / 60.0) + "min")
            log.info("Estimated time for finish: " + str(timeEstimated[:4]))
            log.info("Cases finished: " + str(casesRun) +
                     " cases left: " + str(casesLeft))
            log.info(" ")
            ch.clearResults()

            # replace flow fields in the initial dir with archived ones
            log.info("...Modifying bc:s")
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            # for filename in flowFiles:
            #     flowArchive.getFile(
            #         outputFile=path.join(ch.initialDir(), filename),
            #         fileName=filename, archiveDirName=dirName
            #     )
            log.info("Restored archived flow fields!")

            # run configured initialization utilities, if any
            for initCmd in initCmds:
                initUtil = UtilityRunner(argv=[initCmd, "-case", args.case],
                                         silent=True, logname=initCmd)
                initUtil.start()
                if initUtil.runOK():
                    log.info("Successfully finished: %s" % initCmd)
                else:
                    log.error("Error when running: %s" % initCmd)
                    sys.exit(1)

            # decompose for parallel execution
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True, logname="decomposePar")
                    log.info("Decomposing case for %i processors"
                             % Lam.cpuNr())
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit()
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                log.info("Serial Run chosen!")

            # run the species solver until convergence / iteration limit
            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(StandardLogAnalyzer(),
                                           argv=[solver, "-case", args.case],
                                           silent=True, lam=Lam,
                                           logname=solver)
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit()

            # reconstruct and drop the decomposed mesh after a parallel run
            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True, logname="reconstrucPar")
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("recunstruction ready!")
                else:
                    log.error("Error while running recontructPar")
                    sys.exit()
                log.info("Removing decomposed mesh")
                ch.execute("rm -r " + os.path.join(args.case, "processor*"))
                log.info("Removed decomposed mesh!")

            # detect aborted runs (solver stopped before writeInterval)
            iterationsReady = (int(ch.getLast()) -
                               int(path.basename(ch.initialDir())))
            if iterationsReady < iterations:
                log.warning("Run was aborted before finalizing" +
                            " the wanted number of iterations")
                log.warning("Guessing that nan:s were present in results. " +
                            "Removing results from current run and moving on")

            log.info("Archiving results")
            # save latest concentration result files
            # ('spec*' fields except the default, skipping editor backups)
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default" and
                ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(path.join(ch.latestDir(), filename),
                                    dirName=dirName)
                convTable.addResidual("wd_" + str(wdir) + "_ws_" +
                                      str(wspeed), "speciesFoam",
                                      "linear_" + filename, casesRun + 1)
                convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed),
                                    filename, casesRun + 1)
            log.info("Residuals and probes from solver " +
                     "speciesFoam added to convergence table")

            # Adding the list of names of archived concentration
            # files to the statisticsDict dictionary
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList("concFileList",
                                                archivedConcFiles)
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s"
                     % (wdir, wspeed, ch.getLast()))
            log.info(" ")

            # per-run bookkeeping for the time estimate
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)

    # --- final cleanup: restore the case to its original state ---
    ch.clearResults()
    ch.restoreInitialFields()
    log.info("Restored initital fields")
    # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    # NOTE(review): message says 'windFoam' although this runner drives
    # speciesFoam — likely copy-paste from windRunner; runtime string left
    # untouched here.
    log.info("Residuals and probes from solver windFoam " +
             "written to case/convergence directory")
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def main():
    """Batch-run the species (concentration) solver for every
    (wspeed, wdir) pair, seeding each run from an archived flow field.

    Skips pairs already present in the concentration archive, warns and
    skips pairs with no archived flow field, and records residuals/probes
    per species file in a convergence table.
    NOTE(review): still Python 2 (octal literal '05' below).
    """
    # --- command-line parsing (argparse) ---
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-t", "--template", action="store", dest="template",
                        help="Generate default controlfile")
    parser.add_argument(
        '-v',
        action=VerboseAction, dest='loglevel', default=logging.INFO,
        help='increase verbosity in terminal',
    )
    parser.add_argument(
        '-l', metavar='logfile',
        action=LogFileAction, dest='logfile',
        help='write verbose output to logfile',
    )
    parser.add_argument(
        "-c", "--case", action="store", dest="case",
        default=os.getcwd(),
        help="Specifies case directory (default is current workdir)",
    )
    parser.add_argument(action="store", dest="controlfile",
                        help="Controlfile for speciesRunner")
    args = parser.parse_args()

    # -t: emit a default controlfile template and quit
    if args.template is not None:
        generateCf(args.template, defaultCf)
        log.info('Wrote default controlfile')
        sys.exit(0)

    # --- read run configuration from the controlfile ---
    cf = ControlFile.ControlFile(fileName=args.controlfile)
    caseName = path.basename(args.case)
    ch = CaseHandler.CaseHandler(args.case)
    wspeeds = cf.findScalarList("wspeeds:", optional=False)
    wdirs = cf.findScalarList("wdirs:", optional=False)
    iterations = cf.findScalar("iterations:", optional=False)
    fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False)
    flowArchiveDirName = cf.findString(
        "flowArchiveDirName:", optional=True, default='flowArchive')
    concArchiveDirName = cf.findString(
        "concArchiveDirName:", optional=True, default='concArchive')
    restoreArchived = cf.findBoolean(
        "restoreArchived:", optional=True, default=False)
    nodes = int(cf.findScalar("nodes:", optional=False))
    CPUs = cf.findScalar("CPUs:", optional=True)
    if CPUs is None:
        # derive process count from node count when CPUs is not given
        nprocesses = CORES_PER_NODE * nodes
    else:
        nprocesses = int(CPUs)
    #-----------------------------------
    solver = cf.findString("solver:", default="speciesFoam")
    initCmds = cf.findStringList("initialize:", default=[], optional=True)
    flowArchive = FoamArchive.FoamArchive(args.case, flowArchiveDirName)
    concArchive = FoamArchive.FoamArchive(args.case, concArchiveDirName)
    nwdir = len(wdirs)
    convTable = ConvergenceTable.ConvergenceTable(args.case)

    # --- log a setup overview ---
    log.info("Running speciesFoam")
    log.info("Setup overview:")
    log.info(25 * "-")
    log.info("Case: " + caseName)
    log.info(25 * "-")
    log.info("Wind directions are: " + str(wdirs))
    log.info("Wind speeds are: " + str(wspeeds))
    nruns = nwdir * len(wspeeds)
    log.info("Total number of runs: " + str(nruns))
    log.info(25 * "-")
    log.info("Number of iterations are: " + str(iterations))
    log.info("Number of nodes are: " + str(nodes))
    log.info("Fields to be archived: " + str(fieldsToArchive))
    log.info(50 * "=")

    controlDict = ParameterFile(ch.controlDict())
    statisticsDict = ExtendedParameterFile.ExtendedParameterFile(
        path.join(ch.systemDir(), "statisticsDict")
    )
    # account for .gz suffixes when the case writes compressed fields
    if controlDict.readParameter("writeCompression") == "compressed":
        filesToArchive = [field + ".gz" for field in fieldsToArchive]
        flowFiles = [field + ".gz" for field in FLOW_FILES]
    else:
        filesToArchive = fieldsToArchive
        flowFiles = FLOW_FILES

    # booting lammachine for parallell execution
    if nprocesses > 1:
        Lam = LAMMachine(nr=nprocesses)
        Lam.writeScotch(ch)
    controlDict.replaceParameter("stopAt", "nextWrite")

    # --- crude wall-clock bookkeeping (20 s/iteration guess) ---
    timeLeft = iterations * nruns * 20
    # NOTE(review): '05' is an octal literal (value 5) — probably meant 0;
    # this is also invalid syntax under Python 3.
    timeSpent = 05
    timeCase = iterations * 20
    timeEstimated = time.localtime(time.time() + timeLeft)
    casesRun = 0
    casesLeft = nruns
    log.info("Backing up initial fields")
    ch.backUpInitialFields()
    log.info("Backup made of initial fields")

    # --- main batch loop over all wind-speed / wind-direction pairs ---
    for wspeed in wspeeds:
        for wdir in wdirs:
            timeInit = time.time()
            dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
            # skip runs whose concentrations are already archived
            if concArchive.inArchive(dirName=dirName) and not restoreArchived:
                log.info(
                    'Results already in concentration archive, moving on...')
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue
            # cannot run without an archived flow field to start from
            if not flowArchive.inArchive(dirName=dirName):
                log.warning("Missing flow files in dir: %s, moving on..."
                            % dirName)
                casesRun += 1
                casesLeft -= 1
                timeCase = time.time() - timeInit
                timeSpent += timeCase
                timeLeft = casesLeft * timeCase
                timeEstimated = time.localtime(time.time() + timeLeft)
                continue

            controlDict.replaceParameter("writeInterval", str(iterations))
            log.info(
                "Running calculations for dir: " + str(wdir) +
                " speed: " + str(wspeed)
            )
            log.info(
                "Time left: " + str(timeLeft / 60.0) +
                "min, Time spent: " + str(timeSpent / 60.0) + "min"
            )
            log.info(
                "Estimated time for finish: " + str(timeEstimated[:4])
            )
            log.info(
                "Cases finished: " + str(casesRun) +
                " cases left: " + str(casesLeft)
            )
            log.info(" ")
            ch.clearResults()

            # replace flow fields in the initial dir with archived ones
            log.info("...Modifying bc:s")
            for f in flowFiles:
                ch.execute("rm " + path.join(ch.initialDir(), f))
            ch.modWindDir(ch.initialDir(), wdir)
            log.info("bc:s modified!")
            log.info("Restoring archived flow fields")
            flowArchive.restore(dirName, ch.initialDir(), flowFiles)
            # for filename in flowFiles:
            #     flowArchive.getFile(
            #         outputFile=path.join(ch.initialDir(), filename),
            #         fileName=filename, archiveDirName=dirName
            #     )
            log.info("Restored archived flow fields!")

            # run configured initialization utilities, if any
            for initCmd in initCmds:
                initUtil = UtilityRunner(
                    argv=[initCmd, "-case", args.case],
                    silent=True, logname=initCmd
                )
                initUtil.start()
                if initUtil.runOK():
                    log.info(
                        "Successfully finished: %s" % initCmd
                    )
                else:
                    log.error(
                        "Error when running: %s" % initCmd
                    )
                    sys.exit(1)

            # decompose for parallel execution
            if nprocesses > 1:
                if Lam.machineOK():
                    decomposeCmd = "decomposePar"
                    decomposeUtil = UtilityRunner(
                        argv=[decomposeCmd, "-case", args.case],
                        silent=True, logname="decomposePar"
                    )
                    log.info(
                        "Decomposing case for %i processors" % Lam.cpuNr()
                    )
                    decomposeUtil.start()
                    if decomposeUtil.runOK():
                        log.info("Case decomposed!")
                    else:
                        log.error("Error when running decomposePar")
                        sys.exit()
                else:
                    log.error("Error: Could not start lam-machine")
                    sys.exit()
            else:
                Lam = None
                log.info("Serial Run chosen!")

            # run the species solver until convergence / iteration limit
            log.info("...Running solver for species")
            FoamSolver = ConvergenceRunner(
                StandardLogAnalyzer(),
                argv=[solver, "-case", args.case],
                silent=True, lam=Lam,
                logname=solver
            )
            FoamSolver.start()
            if FoamSolver.runOK():
                log.info("Iterations finished for speciesFoam")
            else:
                log.error("Error while running speciesFoam")
                sys.exit()

            # reconstruct and drop the decomposed mesh after a parallel run
            if nprocesses > 1:
                log.info("Reconstructing decomposed case...")
                reconstructCmd = "reconstructPar"
                reconstructUtil = UtilityRunner(
                    argv=[reconstructCmd, "-case", args.case],
                    silent=True, logname="reconstrucPar"
                )
                reconstructUtil.start()
                if reconstructUtil.runOK():
                    log.info("recunstruction ready!")
                else:
                    log.error("Error while running recontructPar")
                    sys.exit()
                log.info("Removing decomposed mesh")
                ch.execute(
                    "rm -r " + os.path.join(args.case, "processor*")
                )
                log.info("Removed decomposed mesh!")

            # detect aborted runs (solver stopped before writeInterval)
            iterationsReady = (
                int(ch.getLast()) - int(path.basename(ch.initialDir()))
            )
            if iterationsReady < iterations:
                log.warning(
                    "Run was aborted before finalizing" +
                    " the wanted number of iterations"
                )
                log.warning(
                    "Guessing that nan:s were present in results. " +
                    "Removing results from current run and moving on"
                )

            log.info("Archiving results")
            # save latest concentration result files
            # ('spec*' fields except the default, skipping editor backups)
            solFiles = [
                f for f in os.listdir(ch.latestDir())
                if f[:4] == "spec" and f[:12] != "spec_default" and
                ".bak" not in f and "~" not in f and "#" not in f
            ]
            for filename in solFiles:
                dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir)
                concArchive.addFile(
                    path.join(ch.latestDir(), filename), dirName=dirName
                )
                convTable.addResidual(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    "speciesFoam", "linear_" + filename, casesRun+1
                )
                convTable.addProbes(
                    "wd_" + str(wdir) + "_ws_" + str(wspeed),
                    filename, casesRun + 1
                )
            log.info(
                "Residuals and probes from solver " +
                "speciesFoam added to convergence table"
            )

            # Adding the list of names of archived concentration
            # files to the statisticsDict dictionary
            archivedConcFiles = concArchive.listFilesInDirs("spec_")
            statisticsDict.replaceParameterList(
                "concFileList", archivedConcFiles
            )
            log.info("Finished wdir: %f, wspeed: %f, Last iter: %s" % (
                wdir, wspeed, ch.getLast())
            )
            log.info(" ")

            # per-run bookkeeping for the time estimate
            casesRun += 1
            casesLeft -= 1
            timeCase = time.time() - timeInit
            timeSpent += timeCase
            timeLeft = casesLeft * timeCase
            timeEstimated = time.localtime(time.time() + timeLeft)

    # --- final cleanup: restore the case to its original state ---
    ch.clearResults()
    ch.restoreInitialFields()
    log.info("Restored initital fields")
    # restoring windData dictionary to original state
    convTable.writeProbes()
    convTable.writeResiduals()
    # NOTE(review): message says 'windFoam' although this runner drives
    # speciesFoam — likely copy-paste from windRunner; runtime string left
    # untouched here.
    log.info(
        "Residuals and probes from solver windFoam " +
        "written to case/convergence directory"
    )
    # Restoring controlDict to original state
    controlDict.purgeFile()
    log.info("Finished batch calculation!")
def main(): parser = OptionParser(usage=usage, version=version) parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors") parser.add_option("-t", "--template", action="store", dest="controlfile", default=None, help="Generate default controlfile") parser.add_option("-l", "--logfile", action="store", dest="logfile", default=None, help="Writes output to logfile") parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Writes output to logfile") parser.add_option("-c", "--case", action="store", dest="case", default=None, help="Specifies case directory") (options, args) = parser.parse_args() rootLogger = logging.getLogger('') logger = logging.getLogger('canopyRunner') reportLevel = logging.INFO if options.quiet: reportLevel = logging.WARNING if options.debug: reportLevel = logging.DEBUG rootLogger.setLevel(reportLevel) if options.logfile == None: console = logging.StreamHandler() console.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) rootLogger.addHandler(console) if options.controlfile != None: generateCf(path.abspath(options.controlfile)) print "Wrote default controlfile" sys.exit() if options.logfile != None: logFileName = path.abspath(options.logfile) if not path.exists(path.dirname(logFileName)): print "Bad argument, directory for logfile does not exist" sys.exit() logfile = logging.FileHandler(logFileName, "w") logfile.setLevel(reportLevel) formatter = logging.Formatter( '%(name)-12s: %(levelname)-8s %(message)s') logfile.setFormatter(formatter) rootLogger.addHandler(logfile) if len(args) != 1: parser.error("Incorrect number of arguments") cf = ControlFile.ControlFile(fileName=path.abspath(args[0])) if options.case != None: casePath = path.abspath(options.case) else: casePath = os.getcwd() caseName = path.basename(casePath) ch = CaseHandler.CaseHandler(casePath) wspeeds = 
cf.findScalarList("wspeeds:", optional=False) wdirs = cf.findScalarList("wdirs:", optional=False) iterations = cf.findScalar("iterations:", optional=False) inletProfile_z0 = cf.findScalar("z0:", optional=False) fieldsToArchive = cf.findStringList("fieldsToArchive:", optional=False) archiveDirName = cf.findString("flowArchiveDirName:", optional=False) restoreArchived = cf.findBoolean("restoreArchived:", optional=True, default=False) archiveVTK = cf.findBoolean("archiveVTK:", optional=False) VTKArchiveDir = cf.findExistingPath("VTKArchiveDir:", optional=False) nodes = int(cf.findScalar("nodes:", optional=False)) CPUs = cf.findScalar("CPUs:", optional=True) if CPUs == None: nprocesses = 8 * nodes else: nprocesses = int(CPUs) #----------------------------------- solver = cf.findString("solver:", default="windFoam") softStart = cf.findString("softstart_application:", optional=True) initCmds = cf.findStringList("initialize:", default=["setWindInlet"]) flowArchive = FoamArchive.FoamArchive(casePath, archiveDirName) nwdir = len(wdirs) convTable = ConvergenceTable.ConvergenceTable(casePath) logger.info("Running windRunner.py") logger.info("Setup overview:") logger.info(25 * "-") logger.info("Case: " + caseName) logger.info(25 * "-") logger.info("Wind directions are: " + str(wdirs)) logger.info("Wind speeds are: " + str(wspeeds)) nruns = nwdir * len(wspeeds) logger.info("Total number of runs: " + str(nruns)) logger.info(25 * "-") logger.info("Number of iterations are: " + str(iterations)) logger.info("Number of nodes are: " + str(nodes)) logger.info("Fields to be archived: " + str(fieldsToArchive)) logger.info("ArchiveToVTK is set to: " + str(archiveVTK)) logger.info(50 * "=") controlDict = ParameterFile(ch.controlDict()) windDict = ParameterFile(path.join(ch.constantDir(), "windDict")) RASDict = ParameterFile(path.join(ch.constantDir(), "RASProperties")) compression = controlDict.readParameter("writeCompression") if compression == "compressed" or compression == "on": 
filesToArchive = [field + ".gz" for field in fieldsToArchive] else: filesToArchive = fieldsToArchive if not path.exists(VTKArchiveDir) and archiveVTK: logger.error("The VTKArchiveDir does not exist") sys.exit() #booting lammachine for parallell execution if nprocesses > 1: Lam = LAMMachine(nr=nprocesses) Lam.writeScotch(ch) controlDict.replaceParameter("stopAt", "nextWrite") timeLeft = iterations * nruns * 20 timeSpent = 05 timeCase = iterations * 20 timeEstimated = time.localtime(time.time() + timeLeft) casesRun = 0 casesLeft = nruns ch.backUpInitialFields() logger.info("Backup made of initial fields") for wspeed in wspeeds: for wdir in wdirs: timeInit = time.time() controlDict.replaceParameter("writeInterval", str(iterations)) logger.info("Running calculations for dir: " + str(wdir) + " speed: " + str(wspeed)) logger.info("Time left: " + str(timeLeft / 60.0) + "min, Time spent: " + str(timeSpent / 60.0) + "min") logger.info("Estimated time for finish: " + str(timeEstimated[:4])) logger.info("Cases finished: " + str(casesRun) + " cases left: " + str(casesLeft)) logger.info(" ") ch.clearResults() dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) logger.info("restoreArchived = " + str(restoreArchived)) if restoreArchived and flowArchive.inArchive(dirName=dirName): logger.info("Restoring archived flow fields") flowArchive.restore(dirName, fieldsToArchive, ch.initialDir()) for filename in fieldsToArchive: flowArchive.getFile(outputFile=path.join( ch.initialDir(), filename), fileName=filename, archiveDirName=dirName) logger.info("Restored archived flow fields!") else: logger.info("...Modifying bc:s") ch.modWindDir(ch.initialDir(), wdir) logger.info("bc:s modified!") logger.info("...Setting inlet profiles") windDict.replaceParameter("U10", str(wspeed)) windDict.replaceParameter("windDirection", str(wdir)) windDict.replaceParameter("z0", str(inletProfile_z0)) for initCmd in initCmds: initUtil = UtilityRunner(argv=[initCmd, "-case", casePath], silent=True, 
logname=initCmd) initUtil.start() if initUtil.runOK(): logger.info("Successfully finished: %s" % initCmd) else: logger.error("Error when running: %s" % initCmd) sys.exit() if nprocesses > 1: if Lam.machineOK(): decomposeCmd = "decomposePar" decomposeUtil = UtilityRunner( argv=[decomposeCmd, "-case", casePath], silent=True, logname="decomposePar") logger.info("...Decomposing case to run on" + str(Lam.cpuNr()) + str(" of processors")) decomposeUtil.start() if decomposeUtil.runOK(): logger.info("Case decomposed!") else: logger.error("Error when running decomposePar") sys.exit() else: logger.error("Error: Could not start lam-machine") sys.exit() else: Lam = None logger.info("Serial Run chosen!") if softStart != None: RASDict.replaceParameter("RASModel", "kEpsilon") controlDict.replaceParameter("stopAt", "nextWrite") controlDict.replaceParameter("writeInterval", "50") logger.info("...Softstarting using " + softStart) windFoamSolver = ConvergenceRunner( StandardLogAnalyzer(), argv=[softStart, "-case", casePath], silent=True, lam=Lam, logname=softStart) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() RASDict.replaceParameter("RASModel", "kEpsilon_canopy") controlDict.replaceParameter("writeInterval", str(iterations)) # The folowing line is to copy the landuse and LAD-files after the first iterations with simpleFoam ch.execute( "for file in " + os.path.join(casePath, "processor*/0/[Ll]*") + r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done' ) logger.info("...Running solver for wind field") windFoamSolver = ConvergenceRunner( StandardLogAnalyzer(), argv=[solver, "-case", casePath], silent=True, lam=Lam, logname=solver) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() if nprocesses 
> 1: logger.info("Reconstructing decomposed case...") reconstructCmd = "reconstructPar" reconstructUtil = UtilityRunner( argv=[reconstructCmd, "-latestTime", "-case", casePath], silent=True, logname="reconstrucPar") reconstructUtil.start() if reconstructUtil.runOK(): logger.info("recunstruction ready!") else: logger.error("Error while running recontructPar") sys.exit() logger.info("Removing decomposed mesh") ch.execute("rm -r " + os.path.join(casePath, "processor*")) logger.info("Removed decomposed mesh!") convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Ux", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_Uy", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_k", casesRun + 1) convTable.addResidual("wd_" + str(wdir) + "_ws_" + str(wspeed), solver, "linear_epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "U", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "k", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "epsilon", casesRun + 1) convTable.addProbes("wd_" + str(wdir) + "_ws_" + str(wspeed), "p", casesRun + 1) logger.info("Archiving results from directory: %s" % ch.latestDir()) #save latest concentration result files solFiles = [ file for file in os.listdir(ch.latestDir()) if file in filesToArchive ] for filename in solFiles: dirName = "wspeed_" + str(wspeed) + "_wdir_" + str(wdir) flowArchive.addFile(path.join(ch.latestDir(), filename), dirName=dirName) if archiveVTK: #Creating a temporary last time directory to be used by foamToVTK VTKTime = str(eval(path.basename(ch.latestDir())) + 1) newTimeDir = path.join(casePath, VTKTime) os.mkdir(newTimeDir) for filename in solFiles: oldFile = path.join(casePath, str(eval(ch.getLast()) - 1), filename) ch.execute("cp " + oldFile + " " + newTimeDir + "/") foamToVTKUtil = UtilityRunner( argv=["foamToVTK", "-case", 
casePath, "-time " + VTKTime], silent=True, logname="foamToVTK") foamToVTKUtil.start() if foamToVTKUtil.runOK(): ch.execute("mv " + path.join(casePath, "VTK") + " " + path.join( VTKArchiveDir, "VTK" + "_wspeed_" + str(wspeed) + "_wdir_" + str(wdir))) ch.execute("rm -r " + path.join(casePath, VTKTime)) logger.info("Exported to VTK archive!") else: logger.error("Error when exporting to VTK") sys.exit() logger.info("Finished wdir: " + str(wdir) + " wspeed: " + str(wspeed) + "Last iter = " + ch.getLast()) logger.info(" ") casesRun += 1 casesLeft -= 1 timeCase = time.time() - timeInit timeSpent += timeCase timeLeft = casesLeft * timeCase timeEstimated = time.localtime(time.time() + timeLeft) ch.clearResults() logger.info("Cleared all result directories exept: %s" % (" ".join(ch.getTimes()))) ch.restoreInitialFields() logger.info("Restored initital fields from backup copy") #restoring windData dictionary to original state windDict.purgeFile() convTable.writeProbes() convTable.writeResiduals() logger.info( "Residuals and probes from solver windFoam written to case/convergence directory" ) #Restoring controlDict to original state controlDict.purgeFile() logger.info("Finished batch calculation!")
def main(): parser=OptionParser(usage= usage, version=version) parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="Only print warnings and errors") parser.add_option("-t", "--template", action="store",dest="controlfile",default=None, help="Generate default controlfile") parser.add_option("-l", "--logfile", action="store",dest="logfile",default=None, help="Writes output to logfile") parser.add_option("-d", "--debug", action="store_true",dest="debug",default=False, help="Writes output to logfile") parser.add_option("-c", "--case", action="store",dest="case",default=None, help="Specifies case directory") (options, args) = parser.parse_args() rootLogger=logging.getLogger('') logger=logging.getLogger('canopyRunner') reportLevel=logging.INFO if options.quiet: reportLevel=logging.WARNING if options.debug: reportLevel=logging.DEBUG rootLogger.setLevel(reportLevel) if options.logfile==None: console=logging.StreamHandler() console.setLevel(reportLevel) formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') console.setFormatter(formatter) rootLogger.addHandler(console) if options.controlfile!=None: generateCf(path.abspath(options.controlfile)) print "Wrote default controlfile" sys.exit() if options.logfile!=None: logFileName=path.abspath(options.logfile) if not path.exists(path.dirname(logFileName)): print "Bad argument, directory for logfile does not exist" sys.exit() logfile=logging.FileHandler(logFileName,"w") logfile.setLevel(reportLevel) formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') logfile.setFormatter(formatter) rootLogger.addHandler(logfile) if len(args)!=1: parser.error("Incorrect number of arguments") cf=ControlFile.ControlFile(fileName=path.abspath(args[0])) if options.case!=None: casePath=path.abspath(options.case) else: casePath=os.getcwd() caseName=path.basename(casePath) ch=CaseHandler.CaseHandler(casePath) wspeeds=cf.findScalarList("wspeeds:",optional=False) 
wdirs=cf.findScalarList("wdirs:",optional=False) iterations=cf.findScalar("iterations:",optional=False) inletProfile_z0=cf.findScalar("z0:",optional=False) fieldsToArchive=cf.findStringList("fieldsToArchive:",optional=False) archiveDirName=cf.findString("flowArchiveDirName:",optional=False) restoreArchived=cf.findBoolean("restoreArchived:",optional=True,default=False) archiveVTK=cf.findBoolean("archiveVTK:",optional=False) VTKArchiveDir=cf.findExistingPath("VTKArchiveDir:",optional=False) nodes=int(cf.findScalar("nodes:",optional=False)) CPUs=cf.findScalar("CPUs:",optional=True) if CPUs==None: nprocesses=8*nodes else: nprocesses=int(CPUs) #----------------------------------- solver=cf.findString("solver:",default="windFoam") softStart=cf.findString("softstart_application:",optional=True) initCmds=cf.findStringList("initialize:",default=["setWindInlet"]) flowArchive=FoamArchive.FoamArchive(casePath,archiveDirName) nwdir= len(wdirs) convTable=ConvergenceTable.ConvergenceTable(casePath) logger.info("Running windRunner.py") logger.info("Setup overview:") logger.info(25*"-") logger.info("Case: "+ caseName) logger.info(25*"-") logger.info("Wind directions are: "+ str(wdirs)) logger.info("Wind speeds are: "+str(wspeeds)) nruns=nwdir*len(wspeeds) logger.info("Total number of runs: "+str(nruns)) logger.info(25*"-") logger.info("Number of iterations are: "+str(iterations)) logger.info("Number of nodes are: "+str(nodes)) logger.info("Fields to be archived: "+str(fieldsToArchive)) logger.info("ArchiveToVTK is set to: "+str(archiveVTK)) logger.info(50*"=") controlDict=ParameterFile(ch.controlDict()) windDict=ParameterFile(path.join(ch.constantDir(),"windDict")) RASDict=ParameterFile(path.join(ch.constantDir(),"RASProperties")) compression=controlDict.readParameter("writeCompression") if compression=="compressed" or compression=="on": filesToArchive=[field+".gz" for field in fieldsToArchive] else: filesToArchive=fieldsToArchive if not path.exists(VTKArchiveDir) and archiveVTK: 
logger.error("The VTKArchiveDir does not exist") sys.exit() #booting lammachine for parallell execution if nprocesses>1: Lam=LAMMachine(nr=nprocesses) Lam.writeScotch(ch) controlDict.replaceParameter("stopAt", "nextWrite") timeLeft=iterations*nruns*20 timeSpent=05 timeCase=iterations*20 timeEstimated=time.localtime(time.time()+timeLeft) casesRun=0 casesLeft=nruns ch.backUpInitialFields() logger.info("Backup made of initial fields") for wspeed in wspeeds: for wdir in wdirs: timeInit=time.time() controlDict.replaceParameter("writeInterval",str(iterations)) logger.info("Running calculations for dir: "+ str(wdir)+ " speed: "+ str(wspeed)) logger.info("Time left: "+str(timeLeft/60.0)+"min, Time spent: "+str(timeSpent/60.0)+"min") logger.info("Estimated time for finish: "+str(timeEstimated[:4])) logger.info("Cases finished: "+str(casesRun)+" cases left: "+str(casesLeft)) logger.info(" ") ch.clearResults() dirName="wspeed_"+str(wspeed)+"_wdir_"+str(wdir) logger.info("restoreArchived = "+str(restoreArchived)) if restoreArchived and flowArchive.inArchive(dirName=dirName): logger.info("Restoring archived flow fields") flowArchive.restore(dirName,fieldsToArchive,ch.initialDir()) for filename in fieldsToArchive: flowArchive.getFile(outputFile=path.join(ch.initialDir(),filename),fileName=filename,archiveDirName=dirName) logger.info("Restored archived flow fields!") else: logger.info("...Modifying bc:s") ch.modWindDir(ch.initialDir(),wdir) logger.info("bc:s modified!") logger.info("...Setting inlet profiles") windDict.replaceParameter("U10",str(wspeed)) windDict.replaceParameter("windDirection",str(wdir)) windDict.replaceParameter("z0",str(inletProfile_z0)) for initCmd in initCmds: initUtil=UtilityRunner(argv=[initCmd,"-case",casePath],silent=True,logname=initCmd) initUtil.start() if initUtil.runOK(): logger.info("Successfully finished: %s" %initCmd) else: logger.error("Error when running: %s" %initCmd) sys.exit() if nprocesses>1: if Lam.machineOK(): decomposeCmd="decomposePar" 
decomposeUtil=UtilityRunner(argv=[decomposeCmd,"-case",casePath],silent=True,logname="decomposePar") logger.info("...Decomposing case to run on"+str(Lam.cpuNr())+str(" of processors")) decomposeUtil.start() if decomposeUtil.runOK(): logger.info("Case decomposed!") else: logger.error("Error when running decomposePar") sys.exit() else: logger.error("Error: Could not start lam-machine") sys.exit() else: Lam=None logger.info("Serial Run chosen!") if softStart != None: RASDict.replaceParameter("RASModel","kEpsilon") controlDict.replaceParameter("stopAt","nextWrite") controlDict.replaceParameter("writeInterval","50") logger.info("...Softstarting using "+softStart) windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),argv=[softStart,"-case",casePath],silent=True,lam=Lam,logname=softStart) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() RASDict.replaceParameter("RASModel","kEpsilon_canopy") controlDict.replaceParameter("writeInterval",str(iterations)) # The folowing line is to copy the landuse and LAD-files after the first iterations with simpleFoam ch.execute("for file in "+os.path.join(casePath,"processor*/0/[Ll]*") + r'; do for folder in ${file%0*}*; do [ -e ${folder}/`basename ${file}` ] || cp $file ${folder}/`basename ${file}`; done; done') logger.info("...Running solver for wind field") windFoamSolver = ConvergenceRunner(StandardLogAnalyzer(),argv=[solver,"-case",casePath],silent=True,lam=Lam,logname=solver) windFoamSolver.start() if windFoamSolver.runOK(): logger.info("Iterations finished for windFoam") else: logger.error("Error while running windFoam") sys.exit() if nprocesses>1: logger.info("Reconstructing decomposed case...") reconstructCmd="reconstructPar" reconstructUtil=UtilityRunner(argv=[reconstructCmd,"-latestTime","-case",casePath],silent=True,logname="reconstrucPar") reconstructUtil.start() if reconstructUtil.runOK(): 
logger.info("recunstruction ready!") else: logger.error("Error while running recontructPar") sys.exit() logger.info("Removing decomposed mesh") ch.execute("rm -r "+os.path.join(casePath,"processor*")) logger.info("Removed decomposed mesh!") convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_Ux",casesRun+1) convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_Uy",casesRun+1) convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_k",casesRun+1) convTable.addResidual("wd_"+str(wdir)+"_ws_"+str(wspeed),solver,"linear_epsilon",casesRun+1) convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"U",casesRun+1) convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"k",casesRun+1) convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"epsilon",casesRun+1) convTable.addProbes("wd_"+str(wdir)+"_ws_"+str(wspeed),"p",casesRun+1) logger.info("Archiving results from directory: %s" %ch.latestDir()) #save latest concentration result files solFiles=[file for file in os.listdir(ch.latestDir()) if file in filesToArchive] for filename in solFiles: dirName= "wspeed_"+str(wspeed)+"_wdir_"+str(wdir) flowArchive.addFile(path.join(ch.latestDir(),filename),dirName=dirName) if archiveVTK: #Creating a temporary last time directory to be used by foamToVTK VTKTime=str(eval(path.basename(ch.latestDir()))+1) newTimeDir=path.join(casePath,VTKTime) os.mkdir(newTimeDir) for filename in solFiles: oldFile=path.join(casePath,str(eval(ch.getLast())-1),filename) ch.execute("cp "+oldFile+" "+newTimeDir+"/") foamToVTKUtil=UtilityRunner(argv=["foamToVTK","-case",casePath,"-time "+VTKTime],silent=True,logname="foamToVTK") foamToVTKUtil.start() if foamToVTKUtil.runOK(): ch.execute("mv "+path.join(casePath,"VTK")+" "+path.join(VTKArchiveDir,"VTK"+"_wspeed_"+str(wspeed)+"_wdir_"+str(wdir))) ch.execute("rm -r "+path.join(casePath,VTKTime) ) logger.info("Exported to VTK archive!") else: logger.error("Error when exporting to VTK") sys.exit() 
logger.info("Finished wdir: "+ str(wdir)+ " wspeed: "+ str(wspeed)+ "Last iter = "+ch.getLast()) logger.info(" ") casesRun+=1 casesLeft-=1 timeCase=time.time()-timeInit timeSpent+=timeCase timeLeft=casesLeft*timeCase timeEstimated=time.localtime(time.time()+timeLeft) ch.clearResults() logger.info("Cleared all result directories exept: %s" %(" ".join(ch.getTimes() ) )) ch.restoreInitialFields() logger.info("Restored initital fields from backup copy") #restoring windData dictionary to original state windDict.purgeFile() convTable.writeProbes() convTable.writeResiduals() logger.info("Residuals and probes from solver windFoam written to case/convergence directory") #Restoring controlDict to original state controlDict.purgeFile() logger.info("Finished batch calculation!")