def run(self):
    sources=[STLFile(f) for f in self.parser.getArgs()]
    rst=RestructuredTextHelper()
    if self.opts.patchNames:
        print(rst.buildHeading("Patch names",level=RestructuredTextHelper.LevelSection))
        for s in sources:
            print(rst.buildHeading(s.filename(),level=RestructuredTextHelper.LevelSubSection))
            for p in s.patchInfo():
                print(p["name"])
    if self.opts.info:
        print(rst.buildHeading("Patch info",level=RestructuredTextHelper.LevelSection))
        for s in sources:
            print(rst.buildHeading(s.filename(),level=RestructuredTextHelper.LevelSubSection))
            tab=rst.table()
            tab[0]=["name","facets","range in file","bounding box"]
            tab.addLine(head=True)
            for i,p in enumerate(s.patchInfo()):
                tab[(i+1,0)]=p["name"]
                tab[(i+1,1)]=p["facets"]
                tab[(i+1,2)]="%d-%d" % (p["start"],p["end"])
                tab[(i+1,3)]="(%g %g %g) - (%g %g %g)" % tuple(p["min"]+p["max"])
            print(tab)
    if self.opts.joinTo:
        if path.exists(self.opts.joinTo):
            self.error("File",self.opts.joinTo,"already exists")
        result=STLFile()
        for s in sources:
            result+=s
        result.writeTo(self.opts.joinTo)
def makeReport(self, values, level=2, meta=None):
    if meta is None:
        meta = self.metaData
    helper = RestructuredTextHelper(defaultHeading=level)
    val = ""
    for k in meta:
        if k == "":
            if len(meta[k]) == 0:
                continue
            tab = helper.table(labeled=True)
            for kk in meta[k]:
                if "default" in meta[k][kk] and values[kk] != meta[k][kk]["default"]:
                    changed = True
                    tab.addRow(helper.strong(kk))
                else:
                    changed = False
                    tab.addRow(kk)
                for a, v in iteritems(meta[k][kk]):
                    tab.addItem(a, v)
                if changed:
                    tab.addItem("Value", helper.strong(values[kk]))
                else:
                    tab.addItem("Value", values[kk])
            val += str(tab)
        else:
            descr, newMeta = meta[k]
            val += helper.heading(descr)
            val += "\nShort name: " + helper.literal(k) + "\n"
            val += self.makeReport(values, level=level + 1, meta=newMeta)
    return val
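For reference, the labeled-table calls used by makeReport above (table(labeled=True), addRow, addItem, strong) can be exercised standalone. A minimal sketch, assuming a PyFoam installation with the usual module layout and purely illustrative parameter names:

from PyFoam.Basics.RestructuredTextHelper import RestructuredTextHelper  # assumed module path

helper = RestructuredTextHelper(defaultHeading=2)
tab = helper.table(labeled=True)
tab.addRow("inletVelocity")                 # hypothetical parameter name, one row per entry
tab.addItem("description", "Velocity at the inlet patch")
tab.addItem("Value", helper.strong(1.5))    # strong() is what marks a changed default above
print(str(tab))                             # the table renders itself as ReST text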
def doRegion(self, theRegion):
    ReST = RestructuredTextHelper(defaultHeading=self.opts.headingLevel)

    if self.opts.allRegions:
        print_(ReST.buildHeading("Region: ", theRegion, level=self.opts.headingLevel - 1))

    sol = SolutionDirectory(self.parser.getArgs()[0],
                            archive=None,
                            parallel=self.opts.parallel,
                            paraviewLink=False,
                            region=theRegion)

    if self.opts.all:
        self.opts.caseSize = True
        self.opts.shortBCreport = True
        self.opts.longBCreport = True
        self.opts.dimensions = True
        self.opts.internal = True
        self.opts.linearSolvers = True
        self.opts.relaxationFactors = True
        self.opts.processorMatrix = True
        self.opts.decomposition = True

    if self.opts.time:
        try:
            self.opts.time = sol.timeName(sol.timeIndex(self.opts.time, minTime=True))
        except IndexError:
            error("The specified time", self.opts.time, "doesn't exist in the case")
        print_("Using time t=" + self.opts.time + "\n")

    needsPolyBoundaries = False
    needsInitialTime = False

    if self.opts.longBCreport:
        needsPolyBoundaries = True
        needsInitialTime = True
    if self.opts.shortBCreport:
        needsPolyBoundaries = True
        needsInitialTime = True
    if self.opts.dimensions:
        needsInitialTime = True
    if self.opts.internal:
        needsInitialTime = True
    if self.opts.decomposition:
        needsPolyBoundaries = True

    defaultProc = None
    if self.opts.parallel:
        defaultProc = 0

    if needsPolyBoundaries:
        proc = None
        boundary = BoundaryDict(sol.name,
                                region=theRegion,
                                time=self.opts.time,
                                treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII,
                                processor=defaultProc)

        boundMaxLen = 0
        boundaryNames = []
        for b in boundary:
            if b.find("procBoundary") != 0:
                boundaryNames.append(b)
        if self.opts.patches != None:
            tmp = boundaryNames
            boundaryNames = []
            for b in tmp:
                for p in self.opts.patches:
                    if fnmatch(b, p):
                        boundaryNames.append(b)
                        break
        if self.opts.expatches != None:
            tmp = boundaryNames
            boundaryNames = []
            for b in tmp:
                keep = True
                for p in self.opts.expatches:
                    if fnmatch(b, p):
                        keep = False
                        break
                if keep:
                    boundaryNames.append(b)

        for b in boundaryNames:
            boundMaxLen = max(boundMaxLen, len(b))
        boundaryNames.sort()

    if self.opts.time == None:
        procTime = "constant"
    else:
        procTime = self.opts.time

    if needsInitialTime:
        fields = {}

        if self.opts.time == None:
            try:
                time = sol.timeName(0)
            except IndexError:
                error("There is no timestep in the case")
        else:
            time = self.opts.time

        tDir = sol[time]

        nameMaxLen = 0

        for f in tDir:
            try:
                fields[f.baseName()] = f.getContent(
                    listLengthUnparsed=self.opts.longlist,
                    treatBinaryAsASCII=self.opts.treatBinaryAsASCII,
                    doMacroExpansion=self.opts.doMacros)
                nameMaxLen = max(nameMaxLen, len(f.baseName()))
            except PyFoamParserError:
                e = sys.exc_info()[1]  # Needed because python 2.5 does not support 'as e'
                warning("Couldn't parse", f.name, "because of an error:", e, " -> skipping")

        fieldNames = list(fields.keys())
        fieldNames.sort()

    if self.opts.caseSize:
        print_(ReST.heading("Size of the case"))

        nFaces = 0
        nPoints = 0
        nCells = 0
        if self.opts.parallel:
            procs = list(range(sol.nrProcs()))
            print_("Accumulated from", sol.nrProcs(), "processors")
        else:
            procs = [None]

        for p in procs:
            info = MeshInformation(sol.name, processor=p,
                                   region=theRegion, time=self.opts.time)
            nFaces += info.nrOfFaces()
            nPoints += info.nrOfPoints()
            try:
                nCells += info.nrOfCells()
            except:
                nCells = "Not available"

        tab = ReST.table()
        tab[0] = ("Faces", nFaces)
        tab[1] = ("Points", nPoints)
        tab[2] = ("Cells", nCells)
        print_(tab)

    if self.opts.decomposition:
        print_(ReST.heading("Decomposition"))

        if sol.nrProcs() < 2:
            print_("This case is not decomposed")
        else:
            print_("Case is decomposed for", sol.nrProcs(), "processors")
            print_()

            nCells = []
            nFaces = []
            nPoints = []
            for p in sol.processorDirs():
                info = MeshInformation(sol.name, processor=p,
                                       region=theRegion, time=self.opts.time)
                nPoints.append(info.nrOfPoints())
                nFaces.append(info.nrOfFaces())
                nCells.append(info.nrOfCells())

            digits = int(ceil(log10(max(sol.nrProcs(),
                                        max(nCells),
                                        max(nFaces),
                                        max(nPoints))))) + 2
            nameLen = max(len("Points"), boundMaxLen)

            tab = ReST.table()
            tab[0] = ["CPU"] + list(range(sol.nrProcs()))
            tab.addLine()
            tab[1] = ["Points"] + nPoints
            tab[2] = ["Faces"] + nFaces
            tab[3] = ["Cells"] + nCells
            tab.addLine(head=True)
            nr = 3
            for b in boundaryNames:
                nr += 1
                tab[(nr, 0)] = b
                for i, p in enumerate(sol.processorDirs()):
                    try:
                        nFaces = ParsedBoundaryDict(
                            sol.boundaryDict(processor=p, region=theRegion, time=self.opts.time),
                            treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)[b]["nFaces"]
                    except IOError:
                        nFaces = ParsedBoundaryDict(
                            sol.boundaryDict(processor=p, region=theRegion),
                            treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)[b]["nFaces"]
                    except KeyError:
                        nFaces = 0
                    tab[(nr, i + 1)] = nFaces

            print_(tab)

    if self.opts.longBCreport:
        print_(ReST.heading("The boundary conditions for t =", time))

        for b in boundaryNames:
            print_(ReST.buildHeading("Boundary: ", b, level=self.opts.headingLevel + 1))
            bound = boundary[b]
            print_(":Type:\t", bound["type"])
            if "physicalType" in bound:
                print_(":Physical:\t", bound["physicalType"])
            print_(":Faces:\t", bound["nFaces"])
            print_()
            heads = ["Field", "type"]
            tab = ReST.table()
            tab[0] = heads
            tab.addLine(head=True)
            for row, fName in enumerate(fieldNames):
                tab[(row + 1, 0)] = fName
                f = fields[fName]
                if "boundaryField" not in f:
                    tab[(row + 1, 1)] = "Not a field file"
                elif b not in f["boundaryField"]:
                    tab[(row + 1, 1)] = "MISSING !!!"
                else:
                    bf = f["boundaryField"][b]
                    for k in bf:
                        try:
                            col = heads.index(k)
                        except ValueError:
                            col = len(heads)
                            tab[(0, col)] = k
                            heads.append(k)
                        cont = str(bf[k])
                        if type(bf[k]) == Field:
                            if bf[k].isBinary():
                                cont = bf[k].binaryString()
                        if cont.find("\n") >= 0:
                            tab[(row + 1, col)] = cont[:cont.find("\n")] + "..."
                        else:
                            tab[(row + 1, col)] = cont
            print_(tab)

    if self.opts.shortBCreport:
        print_(ReST.heading("Table of boundary conditions for t =", time))

        types = {}
        hasPhysical = False
        for b in boundary:
            if "physicalType" in boundary[b]:
                hasPhysical = True
            types[b] = {}
            for fName in fields:
                f = fields[fName]
                try:
                    if b not in f["boundaryField"]:
                        types[b][fName] = "MISSING"
                    else:
                        types[b][fName] = f["boundaryField"][b]["type"]
                except KeyError:
                    types[b][fName] = "Not a field"

        tab = ReST.table()
        tab[0] = [""] + boundaryNames
        tab.addLine()
        tab[(1, 0)] = "Patch Type"
        for i, b in enumerate(boundaryNames):
            tab[(1, i + 1)] = boundary[b]["type"]
        nr = 2
        if hasPhysical:
            tab[(nr, 0)] = "Physical Type"
            for i, b in enumerate(boundaryNames):
                if "physicalType" in boundary[b]:
                    tab[(nr, i + 1)] = boundary[b]["physicalType"]
            nr += 1
        tab[(nr, 0)] = "Length"
        for i, b in enumerate(boundaryNames):
            tab[(nr, i + 1)] = boundary[b]["nFaces"]
        nr += 1
        tab.addLine(head=True)
        for fName in fieldNames:
            tab[(nr, 0)] = fName
            for i, b in enumerate(boundaryNames):
                tab[(nr, i + 1)] = types[b][fName]
            nr += 1

        print_(tab)

    if self.opts.dimensions:
        print_(ReST.heading("Dimensions of fields for t =", time))

        tab = ReST.table()
        tab[0] = ["Name"] + "[ kg m s K mol A cd ]".split()[1:-1]
        tab.addLine(head=True)
        for i, fName in enumerate(fieldNames):
            f = fields[fName]
            try:
                dim = str(f["dimensions"]).split()[1:-1]
            except KeyError:
                dim = ["-"] * 7
            tab[i + 1] = [fName] + dim
        print_(tab)

    if self.opts.internal:
        print_(ReST.heading("Internal value of fields for t =", time))

        tab = ReST.table()
        tab[0] = ["Name", "Value"]
        tab.addLine(head=True)
        for i, fName in enumerate(fieldNames):
            f = fields[fName]
            try:
                if f["internalField"].isBinary():
                    val = f["internalField"].binaryString()
                else:
                    cont = str(f["internalField"])
                    if cont.find("\n") >= 0:
                        val = cont[:cont.find("\n")] + "..."
                    else:
                        val = cont
            except KeyError:
                val = "Not a field file"
            tab[i + 1] = [fName, val]
        print_(tab)

    if self.opts.processorMatrix:
        print_(ReST.heading("Processor matrix"))

        if sol.nrProcs() < 2:
            print_("This case is not decomposed")
        else:
            matrix = [[0, ] * sol.nrProcs() for i in range(sol.nrProcs())]

            for i, p in enumerate(sol.processorDirs()):
                try:
                    bound = ParsedBoundaryDict(
                        sol.boundaryDict(processor=p, region=theRegion, time=self.opts.time),
                        treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)
                except IOError:
                    bound = ParsedBoundaryDict(
                        sol.boundaryDict(processor=p,
                                         treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII,
                                         region=theRegion),
                        treatBinaryAsASCII=self.opts.boundaryTreatBinaryAsASCII)
                for j in range(sol.nrProcs()):
                    name = "procBoundary%dto%d" % (j, i)
                    name2 = "procBoundary%dto%d" % (i, j)
                    if name in bound:
                        matrix[i][j] = bound[name]["nFaces"]
                    if name2 in bound:
                        matrix[i][j] = bound[name2]["nFaces"]

            print_("Matrix of processor interactions (faces)")
            print_()

            tab = ReST.table()
            tab[0] = ["CPU"] + list(range(sol.nrProcs()))
            tab.addLine(head=True)
            for i, col in enumerate(matrix):
                tab[i + 1] = [i] + matrix[i]
            print_(tab)

    if self.opts.linearSolvers:
        print_(ReST.heading("Linear Solvers"))

        linTable = ReST.table()

        fvSol = ParsedParameterFile(path.join(sol.systemDir(), "fvSolution"),
                                    treatBinaryAsASCII=self.opts.treatBinaryAsASCII)
        allInfo = {}
        for sName in fvSol["solvers"]:
            raw = fvSol["solvers"][sName]
            info = {}
            if type(raw) in [dict, DictProxy]:
                # fvSolution format in 1.7
                try:
                    info["solver"] = raw["solver"]
                except KeyError:
                    info["solver"] = "<none>"
                solverData = raw
            else:
                info["solver"] = raw[0]
                solverData = raw[1]

            if type(solverData) in [dict, DictProxy]:
                try:
                    info["tolerance"] = solverData["tolerance"]
                except KeyError:
                    info["tolerance"] = 1.
                try:
                    info["relTol"] = solverData["relTol"]
                except KeyError:
                    info["relTol"] = 0.
            else:
                # the old (pre-1.5) fvSolution-format
                info["tolerance"] = solverData
                info["relTol"] = raw[2]

            allInfo[sName] = info

        linTable[0] = ["Name", "Solver", "Abs. Tolerance", "Relative Tol."]
        linTable.addLine(head=True)

        nr = 0
        for n, i in iteritems(allInfo):
            nr += 1
            linTable[nr] = (n, i["solver"], i["tolerance"], i["relTol"])
        print_(linTable)

    if self.opts.relaxationFactors:
        print_(ReST.heading("Relaxation"))

        fvSol = ParsedParameterFile(path.join(sol.systemDir(), "fvSolution"),
                                    treatBinaryAsASCII=self.opts.treatBinaryAsASCII)
        if "relaxationFactors" in fvSol:
            relax = fvSol["relaxationFactors"]
            tab = ReST.table()
            tab[0] = ["Name", "Factor"]
            tab.addLine(head=True)
            nr = 0
            if "fields" in relax or "equations" in relax:
                # New syntax
                for k in ["fields", "equations"]:
                    if k in relax:
                        for n, f in iteritems(relax[k]):
                            nr += 1
                            tab[nr] = [k + ": " + n, f]
            else:
                for n, f in iteritems(relax):
                    nr += 1
                    tab[nr] = [n, f]
            print_(tab)
        else:
            print_("No relaxation factors defined for this case")
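All of the reports in doRegion above are built with the same cell-indexed table API. A minimal standalone sketch of that pattern, assuming a PyFoam installation (module path assumed) and made-up numbers:

from PyFoam.Basics.RestructuredTextHelper import RestructuredTextHelper  # assumed module path

ReST = RestructuredTextHelper(defaultHeading=2)
tab = ReST.table()
tab[0] = ["CPU", 0, 1]       # assign a whole row at once
tab.addLine(head=True)       # horizontal rule; head=True marks the header row
tab[(1, 0)] = "Cells"        # or address single cells as (row, column)
tab[(1, 1)] = 12000
tab[(1, 2)] = 11800
print(str(tab))              # prints a ReST grid table, as print_(tab) does above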
def run(self):
    sources = None
    if "stdin" in self.opts.__dict__:
        if self.opts.stdin:
            if len(self.parser.getArgs()) > 0:
                self.error("If --from-stdin specified no arguments are allowed but we have",
                           self.parser.getArgs())
            sources = [STLFile(sys.stdin)]

    if sources == None:
        sources = [STLFile(f) for f in self.parser.getArgs()]

    if self.cmdname in ["remove", "merge"]:
        if len(sources) != 1:
            self.error("Only one input allowed for", self.cmdname)

    if self.cmdname in ["remove", "merge"]:
        if len(self.opts.patchExpr) == 0 and len(self.opts.patchNames) == 0:
            self.error("Neither --patch-name nor --select-expression specified")
        for e in self.opts.patchExpr:
            expr = re.compile(e)
            for s in sources:
                for p in s.patchInfo():
                    if expr.match(p["name"]):
                        self.opts.patchNames.append(p["name"])
        if len(self.opts.patchNames) == 0:
            self.error("No patches fit the provided regular expressions")

    if self.cmdname in ["remove", "join", "merge"]:
        # Check whether output is correct
        if self.opts.stdout and self.opts.stlFile:
            self.error("Can't specify --to-stdout and --stl-file at the same time")
        if self.opts.stlFile:
            if path.exists(self.opts.stlFile):
                if not self.opts.forceWrite:
                    self.error("File", self.opts.stlFile,
                               "already exists. Use --force-write to overwrite")
            outputTo = self.opts.stlFile
        elif self.opts.stdout:
            outputTo = sys.stdout
        else:
            self.error("Specify either --to-stdout or --stl-file")

    rst = RestructuredTextHelper()

    if self.cmdname == "names":
        print_(rst.buildHeading("Patch names", level=RestructuredTextHelper.LevelSection))
        for s in sources:
            print_(rst.buildHeading(s.filename(), level=RestructuredTextHelper.LevelSubSection))
            for p in s.patchInfo():
                print_(p["name"])
    elif self.cmdname == "info":
        print_(rst.buildHeading("Patch info", level=RestructuredTextHelper.LevelSection))
        for s in sources:
            print_(rst.buildHeading(s.filename(), level=RestructuredTextHelper.LevelSubSection))
            tab = rst.table()
            tab[0] = ["name", "facets", "range in file", "bounding box"]
            tab.addLine(head=True)
            for i, p in enumerate(s.patchInfo()):
                tab[(i + 1, 0)] = p["name"]
                tab[(i + 1, 1)] = p["facets"]
                tab[(i + 1, 2)] = "%d-%d" % (p["start"], p["end"])
                tab[(i + 1, 3)] = "(%g %g %g) - (%g %g %g)" % tuple(p["min"] + p["max"])
            print_(tab)
    elif self.cmdname == "join":
        result = STLFile()
        for s in sources:
            result += s
        result.writeTo(outputTo)
    elif self.cmdname == "remove":
        s = sources[0]
        s.erasePatches(self.opts.patchNames)
        s.writeTo(outputTo)
    elif self.cmdname == "merge":
        if self.opts.newName == None:
            self.error("Specify --new-patch-name")
        s = sources[0]
        s.mergePatches(self.opts.patchNames, self.opts.newName)
        s.writeTo(outputTo)
    else:
        self.error("Unimplemented subcommand", self.cmdname)
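The patch selection for the remove/merge subcommands boils down to matching every --select-expression against the names reported by patchInfo(). A minimal sketch of that step, assuming the usual PyFoam module layout and a hypothetical file name and expressions:

import re
from PyFoam.Basics.STLFile import STLFile   # assumed module path

source = STLFile("geometry.stl")            # hypothetical input file
patchExpr = ["inlet.*", "wall_[0-9]+"]      # hypothetical --select-expression values

selected = []
for e in patchExpr:
    expr = re.compile(e)
    for p in source.patchInfo():
        if expr.match(p["name"]):
            selected.append(p["name"])
print(selected)                             # names that remove/merge would operate on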
def prepare(self, sol, cName=None, overrideParameters=None, numberOfProcessors=None):
    """Do the actual preparing
    :param numberOfProcessors: If set this overrides the value set in the command line"""

    if cName == None:
        cName = sol.name

    if self.opts.onlyVariables:
        self.opts.verbose = True

    vals = {}
    vals, self.metaData = self.getDefaultValues(cName)
    vals.update(self.addDictValues(
        "System",
        "Automatically defined values",
        {"casePath": '"' + path.abspath(cName) + '"',
         "caseName": '"' + path.basename(path.abspath(cName)) + '"',
         "foamVersion": foamVersion(),
         "foamFork": foamFork(),
         "numberOfProcessors": numberOfProcessors if numberOfProcessors != None else self.opts.numberOfProcessors}))

    if len(self.opts.extensionAddition) > 0:
        vals.update(self.addDictValues(
            "ExtensionAdditions",
            "Additional extensions to be processed",
            dict((e, True) for e in self.opts.extensionAddition)))

    valsWithDefaults = set(vals.keys())

    self.info("Looking for template values", cName)
    for f in self.opts.valuesDicts:
        self.info("Reading values from", f)
        vals.update(ParsedParameterFile(f, noHeader=True,
                                        doMacroExpansion=True).getValueDict())

    setValues = {}
    for v in self.opts.values:
        self.info("Updating values", v)
        vals.update(eval(v))
        setValues.update(eval(v))

    if overrideParameters:
        vals.update(overrideParameters)

    unknownValues = set(vals.keys()) - valsWithDefaults
    if len(unknownValues) > 0:
        self.warning("Values for which no default was specified: " + ", ".join(unknownValues))

    if self.opts.verbose and len(vals) > 0:
        print_("\nUsed values\n")
        nameLen = max(len("Name"), max(*[len(k) for k in vals.keys()]))
        format = "%%%ds - %%s" % nameLen
        print_(format % ("Name", "Value"))
        print_("-" * 40)
        for k, v in sorted(iteritems(vals)):
            print_(format % (k, v))
        print_("")
    else:
        self.info("\nNo values specified\n")

    self.checkCorrectOptions(vals)

    derivedScript = path.join(cName, self.opts.derivedParametersScript)
    derivedAdded = None
    derivedChanged = None
    if path.exists(derivedScript):
        self.info("Deriving variables in script", derivedScript)
        scriptText = open(derivedScript).read()
        glob = {}
        oldVals = vals.copy()
        exec_(scriptText, glob, vals)
        derivedAdded = []
        derivedChanged = []
        for k, v in iteritems(vals):
            if k not in oldVals:
                derivedAdded.append(k)
            elif vals[k] != oldVals[k]:
                derivedChanged.append(k)
        if len(derivedChanged) > 0 and (
                not self.opts.allowDerivedChanges and
                not configuration().getboolean("PrepareCase", "AllowDerivedChanges")):
            self.error(self.opts.derivedParametersScript,
                       "changed values of", " ".join(derivedChanged),
                       "\nTo allow this set --allow-derived-changes or the configuration item 'AllowDerivedChanges'")
        if len(derivedAdded) > 0:
            self.info("Added values:", " ".join(derivedAdded))
        if len(derivedChanged) > 0:
            self.info("Changed values:", " ".join(derivedChanged))
        if len(derivedAdded) == 0 and len(derivedChanged) == 0:
            self.info("Nothing added or changed")
    else:
        self.info("No script", derivedScript, "for derived values")

    if self.opts.onlyVariables:
        return

    self.__writeToStateFile(sol, "Starting")

    if self.opts.doClear:
        self.info("Clearing", cName)
        self.__writeToStateFile(sol, "Clearing")
        sol.clear(processor=True,
                  pyfoam=True,
                  vtk=True,
                  removeAnalyzed=True,
                  keepParallel=False,
                  clearHistory=False,
                  clearParameters=True,
                  additional=["postProcessing"])
        self.__writeToStateFile(sol, "Done clearing")

    if self.opts.writeParameters:
        fName = path.join(cName, self.parameterOutFile)
        self.info("Writing parameters to", fName)
        with WriteParameterFile(fName, noHeader=True) as w:
            w.content.update(vals, toString=True)
            w["foamVersion"] = vals["foamVersion"]
            w.writeFile()

    if self.opts.writeReport:
        fName = path.join(cName, self.parameterOutFile + ".rst")
        self.info("Writing report to", fName)
        with open(fName, "w") as w:
            helper = RestructuredTextHelper(defaultHeading=1)
            w.write(".. title:: " + self.__strip(vals["caseName"]) + "\n")
            w.write(".. sectnum::\n")
            w.write(".. header:: " + self.__strip(vals["caseName"]) + "\n")
            w.write(".. header:: " + time.asctime() + "\n")
            w.write(".. footer:: ###Page### / ###Total###\n\n")
            w.write("Parameters set in case directory " +
                    helper.literal(self.__strip(vals["casePath"])) + " at " +
                    helper.emphasis(time.asctime()) + "\n\n")
            w.write(".. contents::\n\n")
            if len(self.opts.valuesDicts):
                w.write(helper.heading("Parameter files"))
                w.write("Parameters read from files\n\n")
                w.write(helper.enumerateList([helper.literal(f) for f in self.opts.valuesDicts]))
                w.write("\n")
            if len(setValues) > 0:
                w.write(helper.heading("Overwritten parameters"))
                w.write("These parameters were set from the command line\n\n")
                w.write(helper.definitionList(setValues))
                w.write("\n")
            w.write(helper.heading("Parameters with defaults"))
            w.write(self.makeReport(vals))
            if len(unknownValues) > 0:
                w.write(helper.heading("Unspecified parameters"))
                w.write("If these parameters are actually used then specify them in " +
                        helper.literal(self.defaultParameterFile) + "\n\n")
                tab = helper.table(True)
                for u in unknownValues:
                    tab.addRow(u)
                    tab.addItem("Value", vals[u])
                w.write(str(tab))
            if not derivedAdded is None:
                w.write(helper.heading("Derived Variables"))
                w.write("Script with derived Parameters" + helper.literal(derivedScript) + "\n\n")
                if len(derivedAdded) > 0:
                    w.write("These values were added:\n")
                    tab = helper.table(True)
                    for a in derivedAdded:
                        tab.addRow(a)
                        tab.addItem("Value", str(vals[a]))
                    w.write(str(tab))
                if len(derivedChanged) > 0:
                    w.write("These values were changed:\n")
                    tab = helper.table(True)
                    for a in derivedChanged:
                        tab.addRow(a)
                        tab.addItem("Value", str(vals[a]))
                        tab.addItem("Old", str(oldVals[a]))
                    w.write(str(tab))
                w.write("The code of the script:\n")
                w.write(helper.code(scriptText))

    self.addToCaseLog(cName)

    for over in self.opts.overloadDirs:
        self.info("Overloading files from", over)
        self.__writeToStateFile(sol, "Overloading")
        self.overloadDir(sol.name, over)

    self.__writeToStateFile(sol, "Initial")

    zeroOrig = path.join(sol.name, "0.org")

    hasOrig = path.exists(zeroOrig)
    cleanZero = True

    if not hasOrig:
        self.info("Not going to clean '0'")
        self.opts.cleanDirectories.remove("0")
        cleanZero = False

    if self.opts.doCopy:
        if hasOrig:
            self.info("Found 0.org. Clearing 0")
            zeroDir = path.join(sol.name, "0")
            if path.exists(zeroDir):
                rmtree(zeroDir)
            else:
                self.info("No 0-directory")
        self.info("")
    else:
        cleanZero = False

    if self.opts.doTemplates:
        self.__writeToStateFile(sol, "Templates")
        self.searchAndReplaceTemplates(sol.name, vals,
                                       self.opts.templateExt,
                                       ignoreDirectories=self.opts.ignoreDirectories)
        self.info("")

    backupZeroDir = None

    if self.opts.doMeshCreate:
        self.__writeToStateFile(sol, "Meshing")
        if self.opts.meshCreateScript:
            scriptName = path.join(sol.name, self.opts.meshCreateScript)
            if not path.exists(scriptName):
                self.error("Script", scriptName, "does not exist")
        elif path.exists(path.join(sol.name, self.defaultMeshCreate)):
            scriptName = path.join(sol.name, self.defaultMeshCreate)
        else:
            scriptName = None

        if scriptName:
            self.info("Executing", scriptName, "for mesh creation")
            if self.opts.verbose:
                echo = "Mesh: "
            else:
                echo = None
            self.executeScript(scriptName, workdir=sol.name, echo=echo)
        else:
            self.info("No script for mesh creation found. Looking for 'blockMeshDict'")
            if sol.blockMesh() != "":
                self.info(sol.blockMesh(), "found. Executing 'blockMesh'")
                bm = BasicRunner(argv=["blockMesh", "-case", sol.name])
                bm.start()
                if not bm.runOK():
                    self.error("Problem with blockMesh")
            for r in sol.regions():
                self.info("Checking region", r)
                s = SolutionDirectory(sol.name, region=r,
                                      archive=None, paraviewLink=False)
                if s.blockMesh() != "":
                    self.info(s.blockMesh(), "found. Executing 'blockMesh'")
                    bm = BasicRunner(argv=["blockMesh", "-case", sol.name, "-region", r])
                    bm.start()
                    if not bm.runOK():
                        self.error("Problem with blockMesh")

        self.info("")

        if cleanZero and path.exists(zeroDir):
            self.warning("Mesh creation recreated 0-directory")
            if self.opts.keepZeroDirectoryFromMesh:
                backupZeroDir = zeroDir + ".bakByPyFoam"
                self.info("Backing up", zeroDir, "to", backupZeroDir)
                move(zeroDir, backupZeroDir)
            else:
                self.info("Data in", zeroDir, "will be removed")
        self.__writeToStateFile(sol, "Done Meshing")

    if self.opts.doCopy:
        self.__writeToStateFile(sol, "Copying")
        self.copyOriginals(sol.name)
        self.info("")

        if backupZeroDir:
            self.info("Copying backups from", backupZeroDir, "to", zeroDir)
            self.overloadDir(zeroDir, backupZeroDir)
            self.info("Removing backup", backupZeroDir)
            rmtree(backupZeroDir)

    if self.opts.doPostTemplates:
        self.__writeToStateFile(sol, "Post-templates")
        self.searchAndReplaceTemplates(sol.name, vals,
                                       self.opts.postTemplateExt,
                                       ignoreDirectories=self.opts.ignoreDirectories)
        self.info("")

    if self.opts.doCaseSetup:
        self.__writeToStateFile(sol, "Case setup")
        if self.opts.caseSetupScript:
            scriptName = path.join(sol.name, self.opts.caseSetupScript)
            if not path.exists(scriptName):
                self.error("Script", scriptName, "does not exist")
        elif path.exists(path.join(sol.name, self.defaultCaseSetup)):
            scriptName = path.join(sol.name, self.defaultCaseSetup)
        else:
            scriptName = None

        if scriptName:
            self.info("Executing", scriptName, "for case setup")
            if self.opts.verbose:
                echo = "Case:"
            else:
                echo = None
            self.executeScript(scriptName, workdir=sol.name, echo=echo)
        elif path.exists(path.join(sol.name, "system", "setFieldsDict")):
            self.info("No setup script found. But 'setFieldsDict'. Executing setFields")
            sf = BasicRunner(argv=["setFields", "-case", sol.name])
            sf.start()
            if not sf.runOK():
                self.error("Problem with setFields")
        else:
            self.info("No script for case-setup found. Nothing done")
        self.info("")
        self.__writeToStateFile(sol, "Done case setup")

    if self.opts.doFinalTemplates:
        self.__writeToStateFile(sol, "Final templates")
        self.searchAndReplaceTemplates(sol.name, vals,
                                       self.opts.finalTemplateExt,
                                       ignoreDirectories=self.opts.ignoreDirectories)

    if self.opts.doTemplateClean:
        self.info("Clearing templates")
        for d in self.opts.cleanDirectories:
            for e in [self.opts.templateExt,
                      self.opts.postTemplateExt,
                      self.opts.finalTemplateExt]:
                self.cleanExtension(path.join(sol.name, d), e)
        self.info("")

    self.info("Case setup finished")
    self.__writeToStateFile(sol, "Finished OK")
def reportRunnerData(self, run):
    if self.opts.reportRunnerData:
        try:
            data = run.data["analyzed"]
        except KeyError:
            self.error("No analyzed data")

        print_("\n Analyzed data:")
        print_()
        helper = RestructuredTextHelper(RestructuredTextHelper.LevelSubSubSection)
        for n, d in iteritems(data):
            table = helper.table()
            heads = ["Description", "value"]
            table[0] = heads
            table.addLine(head=True)
            lNr = 1
            for k, v in iteritems(d):
                table[(lNr, 0)] = k
                table[(lNr, 1)] = v
                lNr += 1
            print_(helper.heading(n), table)

    if self.opts.reportRunnerData:
        table = ReSTTable()
        heads = ["Description", "value"]
        table[0] = heads
        table.addLine(head=True)
        lNr = 1
        done = ["analyzed"]

        def addLine(key, description):
            if key in run.data:
                table[(lNr, 0)] = description
                table[(lNr, 1)] = run.data[key]
                done.append(key)
                return lNr + 1
            return lNr

        lNr = addLine("time", "Last simulation time")
        lNr = addLine("stepNr", "Number of timesteps")
        lNr = addLine("lines", "Lines written to stdout")
        lNr = addLine("warnings", "Number of warnings")
        table.addLine()
        for k, v in iteritems(run.data):
            if k not in done:
                table[(lNr, 0)] = k
                table[(lNr, 1)] = v
                lNr += 1

        print_("\n Runner data:")
        print_()
        print_(table)

    if self.opts.dumpRunnerData:
        print_("\n Runner data:", run.data)
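The addLine() helper above only adds a row (and advances the row counter) when the runner actually recorded the key. A plain-Python sketch of the same pattern with made-up run data:

runData = {"time": 1000.0, "lines": 54321}             # made-up subset of run.data
rows = []

def addLine(lNr, key, description):
    if key in runData:
        rows.append((description, runData[key]))
        return lNr + 1
    return lNr

lNr = 1
lNr = addLine(lNr, "time", "Last simulation time")
lNr = addLine(lNr, "stepNr", "Number of timesteps")    # skipped: key not in runData
lNr = addLine(lNr, "lines", "Lines written to stdout")
print(rows)   # [('Last simulation time', 1000.0), ('Lines written to stdout', 54321)]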