def getParallel(self,sol=None):
    """
    @param sol: SolutionDirectory for which the LAMMachine will be constructed (with autosense)
    """
    lam=None
    if self.opts.procnr!=None or self.opts.machinefile!=None:
        lam=LAMMachine(machines=self.opts.machinefile,nr=self.opts.procnr)
    elif self.opts.autosenseParallel and sol!=None:
        if sol.nrProcs()>1:
            lam=LAMMachine(nr=sol.nrProcs())
    return lam
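# Usage sketch (not from the original sources; names marked as placeholders):
# the LAMMachine returned by getParallel() is normally handed to a runner via
# its "lam" keyword, as the scripts further below do. "mySolutionDir" is a
# placeholder SolutionDirectory.
#
#   lam = self.getParallel(sol=mySolutionDir)
#   run = BasicRunner(argv=["simpleFoam", "-case", mySolutionDir.name],
#                     silent=True, logname="Solver", lam=lam)
#   run.start()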
def execute(self,para,log):
    nr=int(replaceValues(self.cpus,para))
    machines=replaceValues(self.hostfile,para)
    options=replaceValues(self.options,para)
    if nr>1:
        print_(" Decomposing for",nr,"CPUs")
        Decomposer(args=[para['case'],str(nr)]+options.split()+["--silent"])
        Command.parallel=LAMMachine(nr=nr,machines=machines)
    else:
        print_(" No decomposition done")
    return True,None
def run(self):
    config=ConfigParser.ConfigParser()
    files=self.parser.getArgs()

    good=config.read(files)
    # will work with 2.4
    # if len(good)!=len(files):
    #    print "Problem while trying to parse files",files
    #    print "Only ",good," could be parsed"
    #    sys.exit(-1)

    benchName=config.get("General","name")
    if self.opts.nameAddition!=None:
        benchName+="_"+self.opts.nameAddition
    if self.opts.foamVersion!=None:
        benchName+="_v"+self.opts.foamVersion

    isParallel=config.getboolean("General","parallel")
    lam=None
    if isParallel:
        nrCpus=config.getint("General","nProcs")
        machineFile=config.get("General","machines")
        if not path.exists(machineFile):
            self.error("Machine file ",machineFile,"needed for parallel run")
        lam=LAMMachine(machineFile,nr=nrCpus)
        if lam.cpuNr()>nrCpus:
            self.error("Wrong number of CPUs: ",lam.cpuNr())
        print "Running parallel on",lam.cpuNr(),"CPUs"

    if config.has_option("General","casesDirectory"):
        casesDirectory=path.expanduser(config.get("General","casesDirectory"))
    else:
        casesDirectory=foamTutorials()

    if not path.exists(casesDirectory):
        self.error("Directory",casesDirectory,"needed with the benchmark cases is missing")
    else:
        print "Using cases from directory",casesDirectory

    benchCases=[]
    config.remove_section("General")

    for sec in config.sections():
        print "Reading: ",sec
        skipIt=False
        skipReason=""
        if config.has_option(sec,"skip"):
            skipIt=config.getboolean(sec,"skip")
            skipReason="Switched off in file"
        if self.opts.excases!=None and not skipIt:
            for p in self.opts.excases:
                if fnmatch(sec,p):
                    skipIt=True
                    skipReason="Switched off by pattern '"+p+"'"
        if self.opts.cases!=None:
            for p in self.opts.cases:
                if fnmatch(sec,p):
                    skipIt=False
                    skipReason=""

        if skipIt:
            print "Skipping case ..... Reason:"+skipReason
            continue
        sol=config.get(sec,"solver")
        cas=config.get(sec,"case")
        pre=eval(config.get(sec,"prepare"))
        preCon=[]
        if config.has_option(sec,"preControlDict"):
            preCon=eval(config.get(sec,"preControlDict"))
        con=eval(config.get(sec,"controlDict"))
        bas=config.getfloat(sec,"baseline")
        wei=config.getfloat(sec,"weight")
        add=[]
        if config.has_option(sec,"additional"):
            add=eval(config.get(sec,"additional"))
            print "Adding: ",add
        util=[]
        if config.has_option(sec,"utilities"):
            util=eval(config.get(sec,"utilities"))
            print "Utilities: ",util
        nr=99999
        if config.has_option(sec,"nr"):
            nr=eval(config.get(sec,"nr"))
        sp=None
        if config.has_option(sec,"blockSplit"):
            sp=eval(config.get(sec,"blockSplit"))
        toRm=[]
        if config.has_option(sec,"filesToRemove"):
            toRm=eval(config.get(sec,"filesToRemove"))
        setInit=[]
        if config.has_option(sec,"setInitial"):
            setInit=eval(config.get(sec,"setInitial"))

        parallelOK=False
        if config.has_option(sec,"parallelOK"):
            parallelOK=config.getboolean(sec,"parallelOK")

        deMet=["metis"]
        if config.has_option(sec,"decomposition"):
            deMet=config.get(sec,"decomposition").split()

        if deMet[0]=="metis":
            pass
        elif deMet[0]=="simple":
            if len(deMet)<2:
                deMet.append(0)
            else:
                deMet[1]=int(deMet[1])
        else:
            print "Unimplemented decomposition method",deMet[0],"switching to metis"
            deMet=["metis"]

        if isParallel==False or parallelOK==True:
            if path.exists(path.join(casesDirectory,sol,cas)):
                benchCases.append( (nr,sec,sol,cas,pre,con,preCon,bas,wei,add,util,sp,toRm,setInit,deMet) )
            else:
                print "Skipping",sec,"because directory",path.join(casesDirectory,sol,cas),"could not be found"
        else:
            print "Skipping",sec,"because not parallel"

    benchCases.sort()

    parallelString=""
    if isParallel:
        parallelString=".cpus="+str(nrCpus)

    resultFile=open("Benchmark."+benchName+"."+uname()[1]+parallelString+".results","w")

    totalSpeedup=0
    minSpeedup=None
    maxSpeedup=None
    totalWeight=0
    runsOK=0
    currentEstimate=1.

    print "\nStart Benching\n"

    csv=CSVCollection("Benchmark."+benchName+"."+uname()[1]+parallelString+".csv")

    # csvHeaders=["description","solver","case","caseDir","base",
    #             "benchmark","machine","arch","cpus","os","version",
    #             "wallclocktime","cputime","cputimeuser","cputimesystem","maxmemory","cpuusage","speedup"]

    for nr,description,solver,case,prepare,control,preControl,base,weight,additional,utilities,split,toRemove,setInit,decomposition in benchCases:
        # control.append( ("endTime",-2000) )
        print "Running Benchmark: ",description
        print "Solver: ",solver
        print "Case: ",case
        caseName=solver+"_"+case+"_"+benchName+"."+uname()[1]+".case"
        print "Short name: ",caseName
        caseDir=caseName+".runDir"

        csv["description"]=description
        csv["solver"]=solver
        csv["case"]=case
        csv["caseDir"]=caseDir
        csv["base"]=base
        csv["benchmark"]=benchName
        csv["machine"]=uname()[1]
        csv["arch"]=uname()[4]
        if lam==None:
            csv["cpus"]=1
        else:
            csv["cpus"]=lam.cpuNr()
        csv["os"]=uname()[0]
        csv["version"]=uname()[2]

        workDir=path.realpath(path.curdir)

        orig=SolutionDirectory(path.join(casesDirectory,solver,case),
                               archive=None,
                               paraviewLink=False)
        for a in additional+utilities:
            orig.addToClone(a)
        orig.cloneCase(path.join(workDir,caseDir))

        if oldApp():
            argv=[solver,workDir,caseDir]
        else:
            argv=[solver,"-case",path.join(workDir,caseDir)]

        run=BasicRunner(silent=True,argv=argv,logname="BenchRunning",lam=lam)
        runDir=run.getSolutionDirectory()
        controlFile=ParameterFile(runDir.controlDict())

        for name,value in preControl:
            print "Setting parameter",name,"to",value,"in controlDict"
            controlFile.replaceParameter(name,value)

        for rm in toRemove:
            fn=path.join(caseDir,rm)
            print "Removing file",fn
            remove(fn)

        for field,bc,val in setInit:
            print "Setting",field,"on",bc,"to",val
            SolutionFile(runDir.initialDir(),field).replaceBoundary(bc,val)

        oldDeltaT=controlFile.replaceParameter("deltaT",0)

        for u in utilities:
            print "Building utility ",u
            execute("wmake 2>&1 >%s %s" % (path.join(caseDir,"BenchCompile."+u),path.join(caseDir,u)))

        print "Preparing the case: "
        if lam!=None:
            prepare=prepare+[("decomposePar","")]
            if decomposition[0]=="metis":
                lam.writeMetis(SolutionDirectory(path.join(workDir,caseDir)))
            elif decomposition[0]=="simple":
                lam.writeSimple(SolutionDirectory(path.join(workDir,caseDir)),decomposition[1])

        if split:
            print "Splitting the mesh:",split
            bm=BlockMesh(runDir.blockMesh())
            bm.refineMesh(split)

        for pre,post in prepare:
            print "Doing ",pre," ...."
            post=post.replace("%case%",caseDir)
            if oldApp():
                args=string.split("%s %s %s %s" % (pre,workDir,caseDir,post))
            else:
                args=string.split("%s -case %s %s" % (pre,path.join(workDir,caseDir),post))
            util=BasicRunner(silent=True,argv=args,logname="BenchPrepare_"+pre)
            util.start()

        controlFile.replaceParameter("deltaT",oldDeltaT)

        # control.append(("endTime",-1000))
        for name,value in control:
            print "Setting parameter",name,"to",value,"in controlDict"
            controlFile.replaceParameter(name,value)

        print "Starting at ",asctime(localtime(time()))
        print " Baseline is %f, estimated speedup %f -> estimated end at %s " % (base,currentEstimate,asctime(localtime(time()+base/currentEstimate)))
        print "Running the case ...."

        run.start()

        speedup=None
        cpuUsage=0
        speedupOut=-1

        try:
            speedup=base/run.run.wallTime()
            cpuUsage=100.*run.run.cpuTime()/run.run.wallTime()
        except ZeroDivisionError:
            print "Division by Zero: ",run.run.wallTime()

        if not run.runOK():
            print "\nWARNING!!!!"
            print "Run had a problem, not using the results. Check the log\n"
            speedup=None

        if speedup!=None:
            speedupOut=speedup
            totalSpeedup+=speedup*weight
            totalWeight+=weight
            runsOK+=1
            if maxSpeedup==None:
                maxSpeedup=speedup
            elif speedup>maxSpeedup:
                maxSpeedup=speedup
            if minSpeedup==None:
                minSpeedup=speedup
            elif speedup<minSpeedup:
                minSpeedup=speedup

        print "Wall clock: ",run.run.wallTime()
        print "Speedup: ",speedup," (Baseline: ",base,")"
        print "CPU Time: ",run.run.cpuTime()
        print "CPU Time User: ",run.run.cpuUserTime()
        print "CPU Time System: ",run.run.cpuSystemTime()
        print "Memory: ",run.run.usedMemory()
        print "CPU Usage: %6.2f%%" % (cpuUsage)

        csv["wallclocktime"]=run.run.wallTime()
        csv["cputime"]=run.run.cpuTime()
        csv["cputimeuser"]=run.run.cpuUserTime()
        csv["cputimesystem"]=run.run.cpuSystemTime()
        csv["maxmemory"]=run.run.usedMemory()
        csv["cpuusage"]=cpuUsage
        if speedup!=None:
            csv["speedup"]=speedup
        else:
            csv["speedup"]="##"

        csv.write()

        resultFile.write("Case %s WallTime %g CPUTime %g UserTime %g SystemTime %g Memory %g MB Speedup %g\n" % (caseName,run.run.wallTime(),run.run.cpuTime(),run.run.cpuUserTime(),run.run.cpuSystemTime(),run.run.usedMemory(),speedupOut))

        resultFile.flush()

        if speedup!=None:
            currentEstimate=totalSpeedup/totalWeight

        if self.opts.removeCases:
            print "Clearing case",
            if speedup==None:
                print "not ... because it failed"
            else:
                print "completely"
                rmtree(caseDir,ignore_errors=True)

        print
        print

    if lam!=None:
        lam.stop()

    print "Total Speedup: ",currentEstimate," ( ",totalSpeedup," / ",totalWeight," ) Range: [",minSpeedup,",",maxSpeedup,"]"

    print runsOK,"of",len(benchCases),"ran OK"

    resultFile.write("Total Speedup: %g\n" % (currentEstimate))
    if minSpeedup and maxSpeedup:
        resultFile.write("Range: [ %g , %g ]\n" % (minSpeedup,maxSpeedup))

    resultFile.close()
def run3dHillBase(template0, AR, z0, us, caseType):
    # loading other parameters from dictionary file
    inputDict = ParsedParameterFile("testZ0InfluenceDict")
    h = inputDict["simParams"]["h"]
    yM = inputDict["simParams"]["yM"]
    # SHM parameters
    cell = inputDict["SHMParams"]["cellSize"]["cell"]
    Href = inputDict["SHMParams"]["domainSize"]["domZ"]
    zz = inputDict["SHMParams"]["pointInDomain"]["zz"]
    # case definitions Martinez2DBump
    ks = 19.58 * z0  # [m] Martinez 2011
    k = inputDict["kEpsParams"]["k"]
    Cmu = inputDict["kEpsParams"]["Cmu"]
    # yp/ks = 0.02 = x/ks
    hSample = inputDict["sampleParams"]["hSample"]
    procnr = multiprocessing.cpu_count()
    caseStr = "_z0_" + str(z0)
    target = "runs/" + template0 + caseStr
    x0, y0, phi = inputDict["SHMParams"]["centerOfDomain"]["x0"], \
        inputDict["SHMParams"]["centerOfDomain"]["x0"], \
        inputDict["SHMParams"]["flowOrigin"]["deg"]*pi/180
    H = h
    a = h*AR

    #---------------------------------------------------------------
    # cloning case
    #---------------------------------------------------------------
    orig = SolutionDirectory(template0, archive=None, paraviewLink=False)
    work = orig.cloneCase(target)

    #---------------------------------------------------------------
    # changing inlet profile - - - - according to Martinez 2010
    #---------------------------------------------------------------
    # change inlet profile
    Uref = Utop = us/k*math.log(Href/z0)
    # calculating turbulentKE
    TKE = us*us/math.sqrt(Cmu)
    # 1: changing ABLConditions
    bmName = path.join(work.initialDir(),"include/ABLConditions")
    template = TemplateFile(bmName+".template")
    template.writeToFile(bmName,{'us':us,'Uref':Uref,'Href':Href,'z0':z0,'xDirection':sin(phi),'yDirection':cos(phi)})
    # 2: changing initialConditions
    bmName = path.join(work.initialDir(),"include/initialConditions")
    template = TemplateFile(bmName+".template")
    template.writeToFile(bmName,{'TKE':TKE})
    # 3: changing initial and boundary conditions for new z0
    # changing ks in nut, inside nutRoughWallFunction
    nutFile = ParsedParameterFile(path.join(work.initialDir(),"nut"))
    nutFile["boundaryField"]["ground"]["Ks"].setUniform(ks)
    nutFile["boundaryField"]["terrain_.*"]["Ks"].setUniform(ks)
    nutFile.writeFile()

    #---------------------------------------------------------------
    # changing sample file
    #---------------------------------------------------------------
    bmName = path.join(work.systemDir(),"sampleDict")
    template = TemplateFile(bmName+".template")
    if AR>100:  # flat terrain, h=0
        template.writeToFile(bmName,{'hillTopY':0,'sampleHeightAbovePlain':50,'sampleHeightAboveHill':50,'inletX':3500})
    else:
        template.writeToFile(bmName,{'hillTopY':h,'sampleHeightAbovePlain':50,'sampleHeightAboveHill':h+50,'inletX':h*AR*4*0.9})

    # if SHM - create mesh
    if caseType.find("SHM")>0:
        phi = phi - pi/180 * 90
        #---------------------------------------------------------------
        # creating blockMeshDict
        #---------------------------------------------------------------
        l, d = a*inputDict["SHMParams"]["domainSize"]["fX"], a*inputDict["SHMParams"]["domainSize"]["fY"]
        x1 = x0 - (l/2*sin(phi) + d/2*cos(phi))
        y1 = y0 - (l/2*cos(phi) - d/2*sin(phi))
        x2 = x0 - (l/2*sin(phi) - d/2*cos(phi))
        y2 = y0 - (l/2*cos(phi) + d/2*sin(phi))
        x3 = x0 + (l/2*sin(phi) + d/2*cos(phi))
        y3 = y0 + (l/2*cos(phi) - d/2*sin(phi))
        x4 = x0 + (l/2*sin(phi) - d/2*cos(phi))
        y4 = y0 + (l/2*cos(phi) + d/2*sin(phi))
        n = floor(d/(cell*inputDict["SHMParams"]["cellSize"]["cellYfactor"]))
        m = floor(l/(cell*inputDict["SHMParams"]["cellSize"]["cellYfactor"]))
        q = floor((Href+450)/cell)  # -450 is the minimum of the blockMeshDict.template - since that is slightly lower than the lowest point on the planet
        bmName = path.join(work.constantDir(),"polyMesh/blockMeshDict")
        template = TemplateFile(bmName+".template")
        template.writeToFile(bmName,{'X0':x1,'X1':x2,'X2':x3,'X3':x4,'Y0':y1,'Y1':y2,'Y2':y3,'Y3':y4,'Z0':Href,'n':int(n),'m':int(m),'q':int(q)})

        #---------------------------------------------------------------
        # running blockMesh
        #---------------------------------------------------------------
        blockRun = BasicRunner(argv=["blockMesh",'-case',work.name],silent=True,server=False,logname="blockMesh")
        print "Running blockMesh"
        blockRun.start()
        if not blockRun.runOK():
            error("there was an error with blockMesh")

        #---------------------------------------------------------------
        # running SHM
        #---------------------------------------------------------------
        print "calculating SHM parameters"
        # calculating refinement box positions
        l1, l2, h1, h2 = 2*a, 1.3*a, 4*H, 2*H  # refinement rules - Martinez 2011
        # enlarging to take account of the rotation angle
        refBox1_minx, refBox1_miny, refBox1_minz = x0 - l1*(sin(phi)+cos(phi)), y0 - l1*(cos(phi)-sin(phi)), 0
        refBox1_maxx, refBox1_maxy, refBox1_maxz = x0 + l1*(sin(phi)+cos(phi)), y0 + l1*(cos(phi)-sin(phi)), h1
        refBox2_minx, refBox2_miny, refBox2_minz = x0 - l2*(sin(phi)+cos(phi)), y0 - l2*(cos(phi)-sin(phi)), 0
        refBox2_maxx, refBox2_maxy, refBox2_maxz = x0 + l2*(sin(phi)+cos(phi)), y0 + l2*(cos(phi)-sin(phi)), h2
        # changing snappyHexMeshDict - with ParsedParameterFile
        SHMDict = ParsedParameterFile(path.join(work.systemDir(),"snappyHexMeshDict"))
        SHMDict["geometry"]["refinementBox1"]["min"] = "("+str(refBox1_minx)+" "+str(refBox1_miny)+" "+str(refBox1_minz)+")"
        SHMDict["geometry"]["refinementBox1"]["max"] = "("+str(refBox1_maxx)+" "+str(refBox1_maxy)+" "+str(refBox1_maxz)+")"
        SHMDict["geometry"]["refinementBox2"]["min"] = "("+str(refBox2_minx)+" "+str(refBox2_miny)+" "+str(refBox2_minz)+")"
        SHMDict["geometry"]["refinementBox2"]["max"] = "("+str(refBox2_maxx)+" "+str(refBox2_maxy)+" "+str(refBox2_maxz)+")"
        SHMDict["castellatedMeshControls"]["locationInMesh"] = "("+str(x0)+" "+str(y0)+" "+str(zz)+")"
        levelRef = inputDict["SHMParams"]["cellSize"]["levelRef"]
        SHMDict["castellatedMeshControls"]["refinementSurfaces"]["terrain"]["level"] = "("+str(levelRef)+" "+str(levelRef)+")"
        r = inputDict["SHMParams"]["cellSize"]["r"]
        SHMDict["addLayersControls"]["expansionRatio"] = r
        fLayerRatio = inputDict["SHMParams"]["cellSize"]["fLayerRatio"]
        SHMDict["addLayersControls"]["finalLayerThickness"] = fLayerRatio
        # calculating the number of layers needed to reach the desired zp/z0
        zp_z0 = inputDict["SHMParams"]["cellSize"]["zp_z0"]
        firstLayerSize = 2*zp_z0*z0
        L = math.log(fLayerRatio/firstLayerSize*cell/2**levelRef)/math.log(r)+1
        SHMDict["addLayersControls"]["layers"]["terrain_solid"]["nSurfaceLayers"] = int(round(L))
        SHMDict.writeFile()
        """
        # changing snappyHexMeshDict - with template file
        snapName = path.join(work.systemDir(),"snappyHexMeshDict")
        template = TemplateFile(snapName+".template")
        template.writeToFile(snapName,{
            'refBox1_minx':refBox1_minx,'refBox1_miny':refBox1_miny,'refBox1_minz':refBox1_minz,
            'refBox1_maxx':refBox1_maxx,'refBox1_maxy':refBox1_maxy,'refBox1_maxz':refBox1_maxz,
            'refBox2_minx':refBox2_minx,'refBox2_miny':refBox2_miny,'refBox2_minz':refBox2_minz,
            'refBox2_maxx':refBox2_maxx,'refBox2_maxy':refBox2_maxy,'refBox2_maxz':refBox2_maxz,
            'locInMesh_x':x0,'locInMesh_y':y0,'locInMesh_z':zz})
        """
        # TODO - add parallel runs!
        SHMrun = BasicRunner(argv=["snappyHexMesh",'-overwrite','-case',work.name],server=False,logname="SHM")
        print "Running SHM"
        SHMrun.start()

    # mapping fields - from an earlier result if it exists
    if caseType.find("mapFields")>0:
        # TODO - fix mapping issue. Very important!
        # copying results from other converged z0 runs
        setName = glob.glob(target + 'Crude/sets/*')
        lastRun = range(len(setName))
        for num in range(len(setName)):
            lastRun[num] = int(setName[num][setName[num].rfind("/")+1:])
        sourceTimeArg = str(max(lastRun))
        mapRun = BasicRunner(argv=['mapFields -consistent -sourceTime ' + sourceTimeArg + ' -case ' + work.name + ' ' + target + "Crude"],silent=True,server=False,logname='mapLog')
        mapRun.start()

    # parallel rule
    # print "Mesh has " + str(cells) + " cells"
    # if cells>100000: parallel=1
    # else: parallel=0
    parallel = 1
    cluster = 1
    if parallel:
        #---------------------------------------------------------------
        # decomposing
        #---------------------------------------------------------------
        # removing U.template from 0/ directory
        subprocess.call("rm " + bmName + ".template ",shell=True)
        arg = " -case " + work.name
        decomposeRun = BasicRunner(argv=["decomposePar -force" + arg],silent=True,server=False,logname="decompose")
        decomposeRun.start()

        #---------------------------------------------------------------
        # running
        #---------------------------------------------------------------
        machine = LAMMachine(nr=procnr)
        # run case
        PlotRunner(args=["--proc=%d"%procnr,"--progress","--no-continuity","--hardcopy","--non-persist","simpleFoam","-case",work.name])

        #---------------------------------------------------------------
        # reconstruct
        #---------------------------------------------------------------
        reconstructRun = BasicRunner(argv=["reconstructPar -latestTime" + arg],silent=True,server=False,logname="reconstructLog")
        reconstructRun.start()
    else:
        #---------------------------------------------------------------
        # running
        #---------------------------------------------------------------
        PlotRunner(args=["--progress","simpleFoam","-case",work.name])

    # sample results
    dirNameList = glob.glob(target + "*")
    dirNameList.sort()
    for dirName in dirNameList:
        # sampling
        arg = " -case " + dirName + "/"
        sampleRun = BasicRunner(argv=["sample -latestTime" + arg],silent=True,server=False,logname="sampleLog")
        sampleRun.start()
        # finding the most converged run.
        setName = glob.glob(dirName + '/sets/*')
        lastRun = range(len(setName))
        for num in range(len(setName)):
            lastRun[num] = int(setName[num][setName[num].rfind("/")+1:])
        m = max(lastRun)
        p = lastRun.index(m)
        data_y = genfromtxt(setName[p] + '/line_y_U.xy',delimiter=' ')
        y, Ux_y, Uy_y = data_y[:,0], data_y[:,1], data_y[:,2]
        if AR<100:  # if terrain isn't flat
            # TODO find the height of the hill - the exact one! because of truncation errors etc -
            # just follow the line measurements and look for the first place above 0
            h = min(data_y[:,0])
            y = y-h  # normalizing data to height of hill-top above ground
    return y,Ux_y,Uy_y
# edit controlDict to account for change in U
controlDict = ParsedParameterFile(path.join(clone_name, "system", "controlDict"))
controlDict["functions"]["forcesCoeffs"]["liftDir"] = Vector(-sin(radians(angle)), cos(radians(angle)), 0)
controlDict["functions"]["forcesCoeffs"]["dragDir"] = Vector(cos(radians(angle)), sin(radians(angle)), 0)
controlDict["functions"]["forcesCoeffs"]["magUInf"] = mach * speedOfSound
controlDict.writeFile()

# implement parallelization
print('Decomposing...')
Decomposer(args=['--progress', clone_name, num_procs])
CaseReport(args=['--decomposition', clone_name])
machine = LAMMachine(nr=num_procs)

# run simpleFoam
foamRun = BasicRunner(argv=[solver, "-case", clone_name], logname="simpleFoam")
print("Running simpleFoam")
foamRun.start()
if not foamRun.runOK():
    error("There was a problem with simpleFoam")

# get headers and last line of postprocessing file
with open(path.join(clone_name, 'postProcessing', 'forcesCoeffs', '0', 'coefficient.dat'), "rb") as table:
    last = table.readlines()[-1].decode()
print("last line of coefficients" + last)
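# Hedged sketch (not part of the original script): the last line of
# coefficient.dat is whitespace-separated numbers, so it can be split into
# floats for further processing. Which column holds which coefficient depends
# on the forceCoeffs/forcesCoeffs configuration, so no names are assigned here.
coeff_values = [float(v) for v in last.split()]
print("parsed %d coefficient columns: %s" % (len(coeff_values), coeff_values))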
print "wow" SHMrun = BasicRunner(argv=["snappyHexMesh", '-overwrite', '-case', work.name], server=False, logname="SHM") print "Running SHM" SHMrun.start() sys.exit(6) #-------------------------------------------------------------------------------------- # running decomposePar #-------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------- # running SimpleFoam #-------------------------------------------------------------------------------------- machine = LAMMachine(nr=procnr) # run case PlotRunner(args=[ "--proc=%d" % procnr, "--with-all", "--progress", "simpleFoam", "-case", work.name ]) #PlotRunner(args=["simpleFoam","-parallel","- proc =% d " % procnr, "-case",work.name]) print "work.name = ", work.name Runner(args=["reconstructPar", "-latestTime", "-case", work.name]) # plotting if plotMartinez: import plotMartinez2DBump plotMartinez2DBump.main(target, hSample) plt.show() elif plotAll:
class BasicDuctRun(InitialTEST):
    """
    CFD test problem
    1) Construct the mesh
    2) Run checkMesh on latest mesh
    3) Run steady state case, (no optimisation)
    """
    # class attributes
    # these attributes are likely to be the same across all instances.

    # Solvers
    solver1 = "simpleFoam_cp"
    solver2 = "cartesianMesh"
    solver3 = "decomposePar"
    solver4 = "potentialFoam"
    solver5 = "renumberMesh"
    solver6 = "reconstructPar"
    # utilities
    checkingmesh = "checkMesh"
    machine = LAMMachine(nr=4)
    ncores = 4
    # CostFunction postprocessing tools
    pCmd = "calcPressureDifference_Kaplan"
    mCmd = "calcEnergyLoss"
    stdout = sys.stdout

    def init(self):
        self.setParameters(solver=self.solver1,
                           sizeClass=self.size_class,
                           minimumRunTime=self.min_run_time,
                           casePath=self.case_path)
        self.counter = 1

    def prepare_case(self, source_case, verbose=False):
        if verbose:
            self.__prepare_case(source_case)
        else:
            with nostdout():
                self.__prepare_case(source_case)

    def __prepare_case(self, source_case):
        # remove any previous case directory
        RemoveCase(self.case_path)
        # restore case from source before running for the first time
        RestoreCase(source_case, self.case_path)
        # Run CFD on base case
        self.run()
        self.shell("cp -r 0 0_orig")  # initial field

    def postRunTestCheckConverged(self):
        try:
            self.isNotEqual(value=self.runInfo()["time"],
                            target=self.controlDict()["endTime"],
                            message="Reached endTime -> not converged")
        except:
            warnings.warn(
                "Warning: The CFD simulation may not have converged. If this is shown at problem instantiation, then you can safely ignore it, as no CFD simulation was performed."
            )
        self.shell("cp -r 0 0_orig")  # initial fields

    def SnappyHexMeshrun(self):
        subprocess.call(['rm', '-r', 'constant/polyMesh/*'], cwd=self.case_path)
        subprocess.call(['rm', '-r', '0'], cwd=self.case_path)
        subprocess.call(['cp', '-r', '0_orig', '0'], cwd=self.case_path)
        # then convert the stl from binary to ascii
        barry = mesh.Mesh.from_file(self.case_path + 'constant/triSurface/ribbon.stl')
        barry.save("ribbon.stl", mode=stl.ASCII)
        subprocess.call(['mv', 'ribbon.stl', self.case_path])
        subprocess.call([
            'surfaceTransformPoints', '-rotate', " ((0 1 0)(0 0 1)) ",
            'ribbon.stl', 'ribbon_modified.stl'
        ], stdout=self.stdout, cwd=self.case_path)
        with open(self.case_path + 'mergedVolume.stl', 'w') as outfile:
            for infile in (self.case_path + 'ribbon_modified.stl',
                           self.case_path + 'Hollorfsen_Cervantes_walls_cfmesh.stl'):
                shutil.copyfileobj(open(infile), outfile)
        subprocess.call(['rm', '-r', 'mergedTotal.stl'], cwd=self.case_path)
        subprocess.call(['rm', '-r', 'mergedTotal2.stl'], cwd=self.case_path)
        subprocess.call(['rm', '-r', 'mergedTotal3.stl'], cwd=self.case_path)
        with open(self.case_path + 'mergedTotal.stl', 'w') as outfile:
            for infile in (self.case_path + 'Hollorfsen_Cervantes_inflow.stl',
                           self.case_path + 'RCONE.stl',
                           self.case_path + 'outflow_extension.stl',
                           self.case_path + 'extended_outflow2.stl',
                           self.case_path + 'mergedVolume.stl'):
                shutil.copyfileobj(open(infile), outfile)
        subprocess.call([
            'surfaceTransformPoints', '-scale', " (0.001 0.001 0.001) ",
            'mergedTotal.stl', 'mergedVolume2.stl'
        ], stdout=self.stdout, cwd=self.case_path)
        snappy = BasicRunner(argv=[self.solver2, "-case", self.case_path], silent=False)
        snappy.start()
        decompose = BasicRunner(argv=[self.solver3, "-case", self.case_path], silent=False)
        decompose.start()

    def run_log(self, cmd, cwd, filename):
        logfile = open(cwd + filename, 'w')
        ret_code = subprocess.call(cmd, cwd=cwd, stdout=logfile)
        return ret_code

    def Optimisationrun(self):
        # run simpleFoam
        # import pdb; pdb.set_trace()
        current = os.getcwd()
        subprocess.call(
            ['mpirun', '-np', str(self.ncores), self.solver4, '-parallel'],
            cwd=current + '/' + self.case_path,
            stdout=self.stdout)
        self.run_log(['mpirun', '-np', str(self.ncores), self.solver1, '-parallel'],
                     cwd=current + '/' + self.case_path,
                     filename='log.txt')
        subprocess.call(
            ['reconstructPar', '-latestTime', '-case', self.case_path],
            stdout=self.stdout)

    def RunUtilities(self, sense='single'):
        lines = []
        lines2 = []
        if os.path.isdir(self.case_path + "10000"):
            N = 1000
        else:
            N = 1
        subprocess.call(['pyFoamCopyLastToFirst.py', self.case_path, self.case_path])
        subprocess.call([
            'pyFoamClearCase.py', self.case_path, '--processors-remove',
            '--keep-postprocessing'
        ])
        # Get the pressure difference (using an external utility)
        pUtil = UtilityRunner(argv=[self.pCmd, "-case", self.case_path, "-latestTime"],
                              silent=True,
                              logname="Pressure")
        pUtil.add("PressureDifference",
                  "Pressure drop = (%f%) between inlet and outlet",
                  idNr=1)
        pUtil.start()
        deltaP = UtilityRunner.get(pUtil, "PressureDifference")[0]
        if sense == "multi":
            # Get the mass flow (using an external utility)
            mUtil = UtilityRunner(argv=[self.mCmd, "-case", self.case_path, "-latestTime"],
                                  silent=True,
                                  logname="MassFlow")
            mUtil.add("mass", "Flux at outlet = (%f%)", idNr=1)
            mUtil.start()
            massFlow = UtilityRunner.get(mUtil, "mass")[0]
            return -float(deltaP), -float(massFlow)
        else:
            return -float(deltaP)

    def __cost_function(self, sense='single'):
        """
        A method to run CFD for the new shape.

        Kwargs:
            sense (str): whether to return single or multi-objective values.
        """
        self.SnappyHexMeshrun()
        self.Optimisationrun()
        if sense == "single":
            p = self.RunUtilities()
            subprocess.call([
                'cp', '-r', self.case_path,
                self.case_path[:-1] + '_' + str(self.counter) + '/'
            ])
            self.counter += 1
            return p
        elif sense == "multi":
            p, m = self.RunUtilities(sense=sense)
            subprocess.call([
                'cp', '-r', self.case_path,
                self.case_path[:-1] + '_' + str(self.counter) + '/'
            ])
            self.counter += 1
            return p, m
        else:
            print("Invalid input for sense: ", sense)
            print("Available options are: 'single' or 'multi'")
            return None

    def cost_function(self, sense='single', verbose=False):
        """
        A method to run CFD for the new shape.

        Kwargs:
            sense (str): whether to return single or multi-objective values.
        """
        if verbose:
            self.stdout = sys.stdout
            return self.__cost_function(sense=sense)
        else:
            self.stdout = open(os.devnull, 'wb')
            with nostdout():
                return self.__cost_function(sense=sense)
print "======" # Utworzenie folderu case'u i-tej serii obliczeń i przekopiowanie do niego folderów "0", "constant" i "system" z case'u obliczanego w poprzedniej serii case_i_dir = case_dir + "_" + str(i) orig=SolutionDirectory(case_prev_dir, archive=None, paraviewLink=False) work=orig.cloneCase(case_i_dir) # Modyfikacja w pliku "turbulenceProperties" wartości wybranych współczynników modelu turbulencji i ich sczytanie turb_coeffs_values_updated = write_read_turbulence_coefficients(i, case_i_dir, turb_coeffs, turb_coeffs_values, delta_turb_coeffs) turb_coeffs_values = turb_coeffs_values_updated # Dekompozycja case'u na potrzeby obliczeń równoległych print "\nDecomposing case" Decomposer(args=["--progress", work.name, cpu_number]) CaseReport(args=["--decomposition", work.name]) machine=LAMMachine(nr=cpu_number) # Obliczenia print "Running calculations\n" theRun=BasicRunner(argv=["simpleFoam", "-case", work.name], silent=True, lam=machine) theRun.start() print "Calculations finish\n" # Rekonstrukcja case'u po zakończeniu obliczeń print "Reconstructing case\n" reconstruction=BasicRunner(argv=["reconstructPar", "-case", work.name], silent=True) reconstruction.start() # Lokalizacja punktu oderwania przepływu cases_numbers_updated, detachment_point_coordinates_updated = dp.find_detachment_point(case_i_dir, i, cases_numbers, detachment_point_coordinates) cases_numbers = cases_numbers_updated
def run2dHillBase(template0, target0, hillName, AR, r, x, Ls, L, L1, H, x0,
                  z0, us, yM, h, caseType):
    # case definitions Martinez2DBump
    ks = 19.58 * z0  # [m] Martinez 2011
    k = 0.4
    Cmu = 0.03  # Castro 96
    Htop = Href = H  # [m]
    # yp/ks = 0.02 = x/ks
    funky = 0
    plotMartinez = 1
    hSample = 10
    fac = 10  # correcting the calculation of the number of cells and the Rx factor to get a smooth transition
              # from the inner refined cells to the outer, less refined cells of the blockMesh mesh
    procnr = 8

    caseStr = "_AR_" + str(AR) + "_z0_" + str(z0)
    if caseType == "Crude":
        caseStr = caseStr + "Crude"
    target = target0 + caseStr
    orig = SolutionDirectory(template0, archive=None, paraviewLink=False)

    #---------------------------------------------------------------
    # cloning case
    #---------------------------------------------------------------
    work = orig.cloneCase(target)

    #---------------------------------------------------------------
    # creating mesh
    #---------------------------------------------------------------
    y0 = 2 * x * z0  # setting first cell according to Martinez 2011 p. 25
    ny = int(round(math.log(H / y0 * (r - 1) + 1) / math.log(r)))  # number of cells in the y direction of the hill block
    Ry = r**(ny - 1.)
    nx = int(L / x0 - 1)
    rx = max(r, 1.1)
    ns = int(round(math.log((Ls - L) / x0 * (rx - 1) / rx**fac + 1) / math.log(rx)))  # number of cells in the x direction of the hill block
    Rx = rx**(ns - 1)
    # changing blockMeshDict - from template file
    if AR == 1000:  # if flat terrain
        bmName = path.join(work.constantDir(), "polyMesh/blockMeshDict")
        template = TemplateFile(bmName + "_flat_3cell.template")
    else:
        bmName = path.join(work.constantDir(), "polyMesh/blockMeshDict")
        template = TemplateFile(bmName + "_3cell.template")
    template.writeToFile(bmName, {'H': H, 'ny': ny, 'Ry': Ry, 'nx': nx, 'L': L, 'L1': L1, 'Ls': Ls, 'Rx': Rx, 'Rx_one_over': 1 / Rx, 'ns': ns})
    # writing ground shape (hill, or whatever you want - equation in function writeGroundShape.py)
    # sample file is changed as well - for sampling h=10 meters above ground
    sampleName = path.join(work.systemDir(), "sampleDict.template")
    write2dShape(bmName, H, L, sampleName, hSample, hillName, AR)
    # changing Y line limits
    bmName = path.join(work.systemDir(), "sampleDict")
    template = TemplateFile(bmName + ".template")
    if AR == 1000:  # if flat terrain
        template.writeToFile(bmName, {'hillTopY': 0, 'maxY': yM * 10})
    else:
        template.writeToFile(bmName, {'hillTopY': h, 'maxY': yM * 10 + h})

    # running blockMesh
    blockRun = BasicRunner(argv=["blockMesh", '-case', work.name],
                           silent=True,
                           server=False,
                           logname="blockMesh")
    blockRun.start()
    if not blockRun.runOK():
        error("there was an error with blockMesh")

    #---------------------------------------------------------------
    # changing inlet profile - - - - according to Martinez 2010
    #---------------------------------------------------------------
    # change inlet profile
    Uref = Utop = us / k * math.log(Href / z0)
    # calculating turbulentKE
    TKE = us * us / math.sqrt(Cmu)
    # 1: changing ABLConditions
    bmName = path.join(work.initialDir(), "include/ABLConditions")
    template = TemplateFile(bmName + ".template")
    template.writeToFile(bmName, {'us': us, 'Uref': Uref, 'Href': Href, 'z0': z0})
    # 2: changing initialConditions
    bmName = path.join(work.initialDir(), "include/initialConditions")
    template = TemplateFile(bmName + ".template")
    template.writeToFile(bmName, {'TKE': TKE})
    if funky:
        # 3: changing U (inserting variables into groovyBC for inlet profile)
        bmName = path.join(work.initialDir(), "U")
        template = TemplateFile(bmName + ".template")
        template.writeToFile(bmName, {'us': us, 'z0': z0, 'K': k, 'Utop': Utop})
        # 4: changing k (inserting variables into groovyBC for inlet profile)
        bmName = path.join(work.initialDir(), "k")
        template = TemplateFile(bmName + ".template")
        template.writeToFile(bmName, {'us': us, 'z0': z0, 'K': k, 'Utop': Utop, 'Cmu': Cmu})
        # 5: changing epsilon (inserting variables into groovyBC for inlet profile)
        bmName = path.join(work.initialDir(), "epsilon")
        template = TemplateFile(bmName + ".template")
        template.writeToFile(bmName, {'us': us, 'z0': z0, 'K': k, 'Utop': Utop, 'Cmu': Cmu})
    # 6: changing initial and boundary conditions for new z0
    # changing ks in nut, inside nutRoughWallFunction
    nutFile = ParsedParameterFile(path.join(work.initialDir(), "nut"))
    nutFile["boundaryField"]["ground"]["Ks"].setUniform(ks)
    nutFile.writeFile()
    # 7: changing convergence criterion for Crude runs
    if caseType == "Crude":
        fvSolutionFile = ParsedParameterFile(path.join(work.systemDir(), "fvSolution"))
        fvSolutionFile["SIMPLE"]["residualControl"]["p"] = 1e-4
        fvSolutionFile["SIMPLE"]["residualControl"]["U"] = 1e-4
        fvSolutionFile.writeFile()

    # mapping fields - from an earlier result if it exists
    if caseType == "mapFields":
        # finding the most converged run, assuming the "crude" run had the same dirName with "Crude" attached
        setName = glob.glob(target + 'Crude/sets/*')
        lastRun = range(len(setName))
        for num in range(len(setName)):
            lastRun[num] = int(setName[num][setName[num].rfind("/") + 1:])
        sourceTimeArg = str(max(lastRun))
        mapRun = BasicRunner(argv=['mapFields -consistent -sourceTime ' + sourceTimeArg + ' -case ' + work.name + ' ' + target + "Crude"],
                             silent=True,
                             server=False,
                             logname='mapLog')
        mapRun.start()

    # parallel rule
    cells = nx * (ny + 2 * ns)
    print "Mesh has " + str(cells) + " cells"
    if cells > 20000:
        parallel = 1
    else:
        parallel = 0

    if parallel:
        #---------------------------------------------------------------
        # decomposing
        #---------------------------------------------------------------
        # removing U.template from 0/ directory
        subprocess.call("rm " + bmName + ".template ", shell=True)
        arg = " -case " + work.name
        decomposeRun = BasicRunner(argv=["decomposePar -force" + arg],
                                   silent=True,
                                   server=False,
                                   logname="decompose")
        decomposeRun.start()

        #---------------------------------------------------------------
        # running
        #---------------------------------------------------------------
        machine = LAMMachine(nr=procnr)
        # run case
        PlotRunner(args=["--proc=%d" % procnr, "--progress", "simpleFoam", "-case", work.name])

        #---------------------------------------------------------------
        # reconstruct
        #---------------------------------------------------------------
        reconstructRun = BasicRunner(argv=["reconstructPar -latestTime" + arg],
                                     silent=True,
                                     server=False,
                                     logname="reconstructLog")
        reconstructRun.start()
    else:
        #---------------------------------------------------------------
        # running
        #---------------------------------------------------------------
        PlotRunner(args=["--progress", "simpleFoam", "-case", work.name])

    # sample results
    dirNameList = glob.glob(target + "*")
    dirNameList.sort()
    for dirName in dirNameList:
        # sampling
        arg = " -case " + dirName + "/"
        sampleRun = BasicRunner(argv=["sample -latestTime" + arg],
                                silent=True,
                                server=False,
                                logname="sampleLog")
        sampleRun.start()
        # finding the most converged run.
        setName = glob.glob(dirName + '/sets/*')
        lastRun = range(len(setName))
        for num in range(len(setName)):
            lastRun[num] = int(setName[num][setName[num].rfind("/") + 1:])
        m = max(lastRun)
        p = lastRun.index(m)
        data_y = genfromtxt(setName[p] + '/line_y_U.xy', delimiter=' ')
        y, Ux_y, Uy_y = data_y[:, 0], data_y[:, 1], data_y[:, 2]
        if AR < 1000:  # if terrain isn't flat
            y = y - h  # normalizing data to height of hill-top above ground
    return y, Ux_y, Uy_y
# run the new case
# creating mesh
if withBlock == 1:
    blockRun = BasicRunner(argv=["blockMesh", '-case', work.name],
                           silent=True,
                           server=False,
                           logname="blocky")
    print "Running blockMesh"
    blockRun.start()
    if not blockRun.runOK():
        error("there was an error with blockMesh")

# decomposing
print "Decomposing"
Decomposer(args=["--progress", work.name, 2])
CaseReport(args=["--decomposition", work.name])

# running
machine = LAMMachine(nr=2)
# laminar case for better first guess (rarely converges for 2D case with simpleFoam)
print "running laminar case"
turb = ParsedParameterFile(path.join(work.name, 'constant/RASProperties'))
turb["turbulence"] = "off"
turb.writeFile()
dic = ParsedParameterFile(path.join(work.name, 'system/controlDict'))
dic["stopAt"] = "endTime"
dic["endTime"] = 750
dic.writeFile()
PlotRunner(args=["--proc=2", "simpleFoam", "-case", work.name])

# turbulent turned on
print "turning turbulence on"
turb["turbulence"] = "on"
turb.writeFile()
def run(self):
    config = ConfigParser.ConfigParser()
    files = self.parser.getArgs()

    good = config.read(files)
    # will work with 2.4
    # if len(good)!=len(files):
    #    print_("Problem while trying to parse files",files)
    #    print_("Only ",good," could be parsed")
    #    sys.exit(-1)

    benchName = config.get("General", "name")
    if self.opts.nameAddition != None:
        benchName += "_" + self.opts.nameAddition
    if self.opts.foamVersion != None:
        benchName += "_v" + self.opts.foamVersion

    isParallel = config.getboolean("General", "parallel")
    lam = None
    if isParallel:
        nrCpus = config.getint("General", "nProcs")
        machineFile = config.get("General", "machines")
        if not path.exists(machineFile):
            self.error("Machine file ", machineFile,
                       "needed for parallel run")
        lam = LAMMachine(machineFile, nr=nrCpus)
        if lam.cpuNr() > nrCpus:
            self.error("Wrong number of CPUs: ", lam.cpuNr())
        print_("Running parallel on", lam.cpuNr(), "CPUs")

    if config.has_option("General", "casesDirectory"):
        casesDirectory = path.expanduser(
            config.get("General", "casesDirectory"))
    else:
        casesDirectory = foamTutorials()

    if not path.exists(casesDirectory):
        self.error("Directory", casesDirectory,
                   "needed with the benchmark cases is missing")
    else:
        print_("Using cases from directory", casesDirectory)

    benchCases = []
    config.remove_section("General")

    for sec in config.sections():
        print_("Reading: ", sec)
        skipIt = False
        skipReason = ""
        if config.has_option(sec, "skip"):
            skipIt = config.getboolean(sec, "skip")
            skipReason = "Switched off in file"
        if self.opts.excases != None and not skipIt:
            for p in self.opts.excases:
                if fnmatch(sec, p):
                    skipIt = True
                    skipReason = "Switched off by pattern '" + p + "'"
        if self.opts.cases != None:
            for p in self.opts.cases:
                if fnmatch(sec, p):
                    skipIt = False
                    skipReason = ""

        if skipIt:
            print_("Skipping case ..... Reason:" + skipReason)
            continue
        sol = config.get(sec, "solver")
        cas = config.get(sec, "case")
        pre = eval(config.get(sec, "prepare"))
        preCon = []
        if config.has_option(sec, "preControlDict"):
            preCon = eval(config.get(sec, "preControlDict"))
        con = eval(config.get(sec, "controlDict"))
        bas = config.getfloat(sec, "baseline")
        wei = config.getfloat(sec, "weight")
        add = []
        if config.has_option(sec, "additional"):
            add = eval(config.get(sec, "additional"))
            print_("Adding: ", add)
        util = []
        if config.has_option(sec, "utilities"):
            util = eval(config.get(sec, "utilities"))
            print_("Utilities: ", util)
        nr = 99999
        if config.has_option(sec, "nr"):
            nr = eval(config.get(sec, "nr"))
        sp = None
        if config.has_option(sec, "blockSplit"):
            sp = eval(config.get(sec, "blockSplit"))
        toRm = []
        if config.has_option(sec, "filesToRemove"):
            toRm = eval(config.get(sec, "filesToRemove"))
        setInit = []
        if config.has_option(sec, "setInitial"):
            setInit = eval(config.get(sec, "setInitial"))

        parallelOK = False
        if config.has_option(sec, "parallelOK"):
            parallelOK = config.getboolean(sec, "parallelOK")

        deMet = ["metis"]
        if config.has_option(sec, "decomposition"):
            deMet = config.get(sec, "decomposition").split()

        if deMet[0] == "metis":
            pass
        elif deMet[0] == "simple":
            if len(deMet) < 2:
                deMet.append(0)
            else:
                deMet[1] = int(deMet[1])
        else:
            print_("Unimplemented decomposition method", deMet[0],
                   "switching to metis")
            deMet = ["metis"]

        if isParallel == False or parallelOK == True:
            if path.exists(path.join(casesDirectory, sol, cas)):
                benchCases.append(
                    (nr, sec, sol, cas, pre, con, preCon, bas, wei, add,
                     util, sp, toRm, setInit, deMet))
            else:
                print_("Skipping", sec, "because directory",
                       path.join(casesDirectory, sol, cas),
                       "could not be found")
        else:
            print_("Skipping", sec, "because not parallel")

    benchCases.sort()

    parallelString = ""
    if isParallel:
        parallelString = ".cpus=" + str(nrCpus)

    resultFile = open(
        "Benchmark." + benchName + "." + uname()[1] + parallelString +
        ".results", "w")

    totalSpeedup = 0
    minSpeedup = None
    maxSpeedup = None
    totalWeight = 0
    runsOK = 0
    currentEstimate = 1.

    print_("\nStart Benching\n")

    csv = CSVCollection("Benchmark." + benchName + "." + uname()[1] +
                        parallelString + ".csv")

    # csvHeaders=["description","solver","case","caseDir","base",
    #             "benchmark","machine","arch","cpus","os","version",
    #             "wallclocktime","cputime","cputimeuser","cputimesystem","maxmemory","cpuusage","speedup"]

    for nr, description, solver, case, prepare, control, preControl, base, weight, additional, utilities, split, toRemove, setInit, decomposition in benchCases:
        # control.append( ("endTime",-2000) )
        print_("Running Benchmark: ", description)
        print_("Solver: ", solver)
        print_("Case: ", case)
        caseName = solver + "_" + case + "_" + benchName + "." + uname()[1] + ".case"
        print_("Short name: ", caseName)
        caseDir = caseName + ".runDir"

        csv["description"] = description
        csv["solver"] = solver
        csv["case"] = case
        csv["caseDir"] = caseDir
        csv["base"] = base
        csv["benchmark"] = benchName
        csv["machine"] = uname()[1]
        csv["arch"] = uname()[4]
        if lam == None:
            csv["cpus"] = 1
        else:
            csv["cpus"] = lam.cpuNr()
        csv["os"] = uname()[0]
        csv["version"] = uname()[2]

        workDir = path.realpath(path.curdir)

        orig = SolutionDirectory(path.join(casesDirectory, solver, case),
                                 archive=None,
                                 paraviewLink=False)
        for a in additional + utilities:
            orig.addToClone(a)
        orig.cloneCase(path.join(workDir, caseDir))

        if oldApp():
            argv = [solver, workDir, caseDir]
        else:
            argv = [solver, "-case", path.join(workDir, caseDir)]

        run = BasicRunner(silent=True,
                          argv=argv,
                          logname="BenchRunning",
                          lam=lam)
        runDir = run.getSolutionDirectory()
        controlFile = ParameterFile(runDir.controlDict())

        for name, value in preControl:
            print_("Setting parameter", name, "to", value, "in controlDict")
            controlFile.replaceParameter(name, value)

        for rm in toRemove:
            fn = path.join(caseDir, rm)
            print_("Removing file", fn)
            remove(fn)

        for field, bc, val in setInit:
            print_("Setting", field, "on", bc, "to", val)
            SolutionFile(runDir.initialDir(), field).replaceBoundary(bc, val)

        oldDeltaT = controlFile.replaceParameter("deltaT", 0)

        for u in utilities:
            print_("Building utility ", u)
            execute("wmake 2>&1 >%s %s" % (path.join(
                caseDir, "BenchCompile." + u), path.join(caseDir, u)))

        print_("Preparing the case: ")
        if lam != None:
            prepare = prepare + [("decomposePar", "")]
            if decomposition[0] == "metis":
                lam.writeMetis(
                    SolutionDirectory(path.join(workDir, caseDir)))
            elif decomposition[0] == "simple":
                lam.writeSimple(
                    SolutionDirectory(path.join(workDir, caseDir)),
                    decomposition[1])

        if split:
            print_("Splitting the mesh:", split)
            bm = BlockMesh(runDir.blockMesh())
            bm.refineMesh(split)

        for pre, post in prepare:
            print_("Doing ", pre, " ....")
            post = post.replace("%case%", caseDir)
            if oldApp():
                args = string.split("%s %s %s %s" %
                                    (pre, workDir, caseDir, post))
            else:
                args = string.split(
                    "%s -case %s %s" %
                    (pre, path.join(workDir, caseDir), post))
            util = BasicRunner(silent=True,
                               argv=args,
                               logname="BenchPrepare_" + pre)
            util.start()

        controlFile.replaceParameter("deltaT", oldDeltaT)

        # control.append(("endTime",-1000))
        for name, value in control:
            print_("Setting parameter", name, "to", value, "in controlDict")
            controlFile.replaceParameter(name, value)

        print_("Starting at ", asctime(localtime(time())))
        print_(
            " Baseline is %f, estimated speedup %f -> estimated end at %s " %
            (base, currentEstimate,
             asctime(localtime(time() + base / currentEstimate))))
        print_("Running the case ....")

        run.start()

        speedup = None
        cpuUsage = 0
        speedupOut = -1

        try:
            speedup = base / run.run.wallTime()
            cpuUsage = 100. * run.run.cpuTime() / run.run.wallTime()
        except ZeroDivisionError:
            print_("Division by Zero: ", run.run.wallTime())

        if not run.runOK():
            print_("\nWARNING!!!!")
            print_("Run had a problem, not using the results. Check the log\n")
            speedup = None

        if speedup != None:
            speedupOut = speedup
            totalSpeedup += speedup * weight
            totalWeight += weight
            runsOK += 1
            if maxSpeedup == None:
                maxSpeedup = speedup
            elif speedup > maxSpeedup:
                maxSpeedup = speedup
            if minSpeedup == None:
                minSpeedup = speedup
            elif speedup < minSpeedup:
                minSpeedup = speedup

        print_("Wall clock: ", run.run.wallTime())
        print_("Speedup: ", speedup, " (Baseline: ", base, ")")
        print_("CPU Time: ", run.run.cpuTime())
        print_("CPU Time User: ", run.run.cpuUserTime())
        print_("CPU Time System: ", run.run.cpuSystemTime())
        print_("Memory: ", run.run.usedMemory())
        print_("CPU Usage: %6.2f%%" % (cpuUsage))

        csv["wallclocktime"] = run.run.wallTime()
        csv["cputime"] = run.run.cpuTime()
        csv["cputimeuser"] = run.run.cpuUserTime()
        csv["cputimesystem"] = run.run.cpuSystemTime()
        csv["maxmemory"] = run.run.usedMemory()
        csv["cpuusage"] = cpuUsage
        if speedup != None:
            csv["speedup"] = speedup
        else:
            csv["speedup"] = "##"

        csv.write()

        resultFile.write(
            "Case %s WallTime %g CPUTime %g UserTime %g SystemTime %g Memory %g MB Speedup %g\n"
            % (caseName, run.run.wallTime(), run.run.cpuTime(),
               run.run.cpuUserTime(), run.run.cpuSystemTime(),
               run.run.usedMemory(), speedupOut))

        resultFile.flush()

        if speedup != None:
            currentEstimate = totalSpeedup / totalWeight

        if self.opts.removeCases:
            print_("Clearing case", end=" ")
            if speedup == None:
                print_("not ... because it failed")
            else:
                print_("completely")
                rmtree(caseDir, ignore_errors=True)

        print_()
        print_()