def __init__(self, dataset, dates, request):
    """Submit a dataset request to a remote service and wait for completion.

    dataset -- name of the remote dataset, appended to the service URL
    dates   -- pair of date tuples; dates[0]/dates[1] are splatted into
               datetime(), so each must match the datetime() signature
    request -- request payload forwarded verbatim to the service

    Side effects: opens a remote Connection (credentials read from the
    module-level `config` dict), prints progress to stdout, and blocks
    until the remote processing is finished.
    """
    # ~~> inheritence
    self.slf2d = SELAFIN('')  # surface
    self.slf2d.DATETIME = dates[0]
    # ~> Initialisation
    self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
    status = ''
    self.request = request
    # ~> Establish connection
    # NOTE(review): `Connection` and `config` come from elsewhere in the
    # module -- presumably an ECMWF-style web API client; verify.
    self.connection = Connection(config['email'], config['key'], quiet=True, verbose=False)
    # ~> Verify connection
    user = self.connection.call("%s/%s" % (config['url'], "who-am-i"))
    print ' ~> access through username: %s\n' % ( user["full_name"] or "user '%s'" % user["uid"], )
    # ~> Request dataset
    self.connection.submit("%s/%s/requests" % (config['url'], dataset), request)
    status = self.connection.status
    print ' ~> request has been', status
    # ~> Wait for remote processing: poll until ready, reporting only
    # when the remote status actually changes.
    while not self.connection.ready():
        if status != self.connection.status:
            status = self.connection.status
            print ' ~> request remains', status, '...'
        self.connection.wait()
    # ~> Request completed
    print ' ~> request is now', self.connection.status
    self.connection.cleanup()
def add(self, typl, what): Caster.add(self, typl, what) # ~~> output from for 3D file if self.obtype == 'slf': if not self.oudata: self.oudata = SELAFIN() # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1)
def read_selafin(): #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ # Openning the selafin file # This gives the name of the variables, and their index number # slf = SELAFIN(input_file) # Getting coordinates x = slf.MESHX y = slf.MESHY # Getting Variables print 'Variables in ' + input_file + ' are:' for i in range(len(slf.VARNAMES)): print ' ', i, '-->', slf.VARNAMES[i] #for i,name in enumerate(slf.VARNAMES): # print ' ',i, ' - ',name # Get IKLE for mesh regularization ikle = np.array(slf.IKLE2) # total number of variables in the input file numvars = len(slf.VARNAMES) # total number of time records in the file nrecs = len(slf.tags["times"]) # an array of size nrecs with values of time steps in the input file times = slf.tags["times"] print "number of records in input file : " + str(nrecs) #print "Available time steps to choose from: " #for i in range(len(times)): # print str(times[i]) # return slf, x, y, ikle, numvars, nrecs, times
def __init__(self,SLFfileName,CLMfileName,SEQfileName='',splitCONLIM=False,DOMfileRoot=''):
    """Load the global SELAFIN and CONLIM files and compute the element split.

    SLFfileName  -- global SELAFIN mesh/result file
    CLMfileName  -- global CONLIM boundary-condition file
    SEQfileName  -- optional METIS sequence file; when given, the split is
                    read from it, otherwise from the 'PROCESSORS' node values
    splitCONLIM  -- whether the CONLIM file will also be split
    DOMfileRoot  -- optional root name for per-domain output files
    """
    print '\n... Acquiring global files'
    # ~~> Acquire global CONLIM file
    print ' +> CONLIM file'
    self.clm = CONLIM(CLMfileName)
    self.isCONLIM = splitCONLIM
    # ~~> Acquire global SELAFIN file
    print ' +> SELAFIN file'
    self.slf = SELAFIN(SLFfileName)
    # ~~> Acquire global SELAFIN file
    if SEQfileName != '':
        print ' +> SEQUENCE file'
        # Partition comes from a binary sequence of little-endian int32s,
        # one per element (METIS output).
        self.NPARTS,self.NSPLIT,self.KSPLIT = self.getSplitFromSequence(np.array( getFileContent(SEQfileName), dtype='<i4' ))
    else:
        # Partition comes from the 'PROCESSORS' variable stored at the nodes.
        self.NPARTS,self.NSPLIT,self.KSPLIT = self.getSplitFromNodeValues('PROCESSORS')
    print '\n... Split by elements in ',self.NPARTS,' parts\n'
    # ~~> Clean inconsistencies in boundary segments
    self.IPOBO,self.NSPLIT,self.KSPLIT = self.setSplitForBoundaries(self.NSPLIT,self.clm.KFRGL,self.KSPLIT)
    self.PINTER,self.PNHALO,self.PNODDS = \
        self.setSplitForElements( self.IPOBO,self.NPARTS,self.NSPLIT,self.KSPLIT )
    # Template SELAFIN carrying the metadata shared by all sub-meshes.
    self.slfn = self.copyCommonData()
    # ~~> Optional output file names
    self.isDOMAIN = DOMfileRoot
def __init__(self, fileName): # ~~> Read the steering file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if not path.exists(fileName): print '... Could not file your DELWAQ file: ', fileName sys.exit(1) self.dwqList = self.parseDWQ(getFileContent(fileName)) # ~~> Read the geometry file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ fle = self.dwqList['grid-indices-file'] if not path.exists(fle): print '...Could not find the GEO file: ', fle sys.exit(1) self.geo = SELAFIN(fle) self.NPOIN3 = int(self.dwqList['grid-cells-first-direction']) if self.NPOIN3 != self.geo.NPOIN3: print '...In consistency in numbers with GEO file: ', self.NPOIN3, self.geo.NPOIN3 sys.exit(1) self.NSEG3 = int(self.dwqList['grid-cells-second-direction']) # ~~> Read the CONLIM file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ fle = self.dwqList['grid-coordinates-file'] if not path.exists(fle): print '...Could not find the CONLIM file: ', fle sys.exit(1) self.conlim = CONLIM(fle) # ~~> Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ self.HYDRO0T = int(self.dwqList['hydrodynamic-start-time']) self.HYDROAT = int(self.dwqList['hydrodynamic-stop-time']) self.HYDRODT = int(self.dwqList['hydrodynamic-timestep']) self.HYDROIT = 1 + (self.HYDROAT - self.HYDRO0T) / self.HYDRODT self.HYDRO00 = 0 self.tfrom = self.HYDRO0T self.tstop = self.HYDROAT
def __init__(self, meshfile, **kwargs):
    '''Reading a Telemac mesh and converting the format and reprojecting etc'''
    # Path to the SELAFIN mesh file.
    self.meshfile = meshfile
    # Keep the raw keyword options for later lookups.
    self.useropts = kwargs
    if self.fileExists():
        # Load the mesh: node coordinates and 2D connectivity.
        self.slf = SELAFIN(meshfile)
        self.xmesh = self.slf.MESHX
        self.ymesh = self.slf.MESHY
        self.elements = self.slf.IKLE2
    # Optional boundary-condition file passed as a keyword argument.
    if "bcfile" in self.useropts:
        self.bcfile = kwargs["bcfile"]
        print(self.bcfile)
class Dumper3D(Caster): def __init__(self, caster, dump): Caster.__init__(self, { 'object': caster.object, 'obdata': caster.obdata }) self.obtype = dump['saveas'] # the type of file, 'slf' most probably self.oudata = None # the loaded SELAFIN object itself, most probably def add(self, typl, what): Caster.add(self, typl, what) # ~~> output from for 3D file if self.obtype == 'slf': if not self.oudata: self.oudata = SELAFIN() # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1) def save(self, fileName): self.oudata.putFileContent(fileName)
def __init__(self, SLFfileName, CLMfileName, SEQfileName='',
             splitCONLIM=False, DOMfileRoot=''):
    """Load the global SELAFIN and CONLIM files and compute the element split.

    SLFfileName  -- global SELAFIN mesh/result file
    CLMfileName  -- global CONLIM boundary-condition file
    SEQfileName  -- optional METIS sequence file; when given, the split is
                    read from it, otherwise from the 'PROCESSORS' node values
    splitCONLIM  -- whether the CONLIM file will also be split
    DOMfileRoot  -- optional root name for per-domain output files
    """
    print '\n... Acquiring global files'
    # ~~> Acquire global CONLIM file
    print ' +> CONLIM file'
    self.clm = CONLIM(CLMfileName)
    self.isCONLIM = splitCONLIM
    # ~~> Acquire global SELAFIN file
    print ' +> SELAFIN file'
    self.slf = SELAFIN(SLFfileName)
    # ~~> Acquire global SELAFIN file
    if SEQfileName != '':
        print ' +> SEQUENCE file'
        # Element partition read as little-endian int32s (METIS output).
        self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromSequence(
            np.array(getFileContent(SEQfileName), dtype='<i4'))
    else:
        # Element partition derived from the 'PROCESSORS' node variable.
        self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromNodeValues(
            'PROCESSORS')
    print '\n... Split by elements in ', self.NPARTS, ' parts\n'
    # ~~> Clean inconsistencies in boundary segments
    self.IPOBO, self.NSPLIT, self.KSPLIT = self.setSplitForBoundaries(
        self.NSPLIT, self.clm.KFRGL, self.KSPLIT)
    self.PINTER,self.PNHALO,self.PNODDS = \
        self.setSplitForElements( self.IPOBO,self.NPARTS,self.NSPLIT,self.KSPLIT )
    # Template SELAFIN carrying the metadata shared by all sub-meshes.
    self.slfn = self.copyCommonData()
    # ~~> Optional output file names
    self.isDOMAIN = DOMfileRoot
def draw(self,type,what,fig):
    """Dispatch one drawing action according to the file format and plot type.

    type -- format keyword ('sortie' or 'SELAFIN'); NOTE(review): shadows
            the builtin `type` -- kept as-is to preserve the interface
    what -- dict describing the plot: 'file', 'vars', and for SELAFIN also
            'type' ('history' or 'v-section'), 'extract', 'time', optional 'roi'
    fig  -- figure handle (not used directly in this body)

    NOTE(review): `deco` and `plt` are not defined locally -- presumably
    module-level decoration dict and matplotlib.pyplot; verify against
    the enclosing module.
    """
    if 'sortie' in type.lower():
        # ~~> Load data
        sortie = getFileContent(what['file'])
        # ~~> Extract data
        data = getValueHistorySortie(sortie,what['vars'])
        # ~~> Deco
        # ~~> Draw data
        drawHistoryLines(plt,data,deco)
    elif 'SELAFIN' in type.upper():
        # ~~> Load data
        slf = SELAFIN(what['file'])
        if what['type'] == 'history':
            # ~~> Extract data: time series at given (x,y) locations
            vars = subsetVariablesSLF(what["vars"],slf.VARNAMES)
            support = xyLocateMeshSLF(what["extract"],slf.NELEM3,slf.IKLE,slf.MESHX,slf.MESHY)
            data = getValueHistorySLF(slf.file,slf.tags,what['time'],support,slf.TITLE,slf.NVAR,slf.NPOIN3,vars)
            # ~~> Deco: optional user-supplied region of interest
            if what.has_key('roi'):
                if what['roi'] != []:
                    deco['roi'] = what['roi']
            # ~~> Draw data
            drawHistoryLines(plt,data,deco)
        elif what['type'] == 'v-section':
            # ~~> Extract data: values along a polyline cross-section
            vars = subsetVariablesSLF(what["vars"],slf.VARNAMES)
            support = crossLocateMeshSLF(what["extract"],slf.NELEM3,slf.IKLE,slf.MESHX,slf.MESHY)
            data = getValuePolylineSLF(slf.file,slf.tags,what['time'],support,slf.TITLE,slf.NVAR,slf.NPOIN3,vars)
            # ~~> Deco: default roi is the mesh bounding box, unless overridden
            deco['roi'] = [ [np.min(slf.MESHX),np.min(slf.MESHY)], [np.max(slf.MESHX),np.max(slf.MESHY)] ]
            if what.has_key('roi'):
                if what['roi'] != []:
                    deco['roi'] = what['roi']
            # ~~> Draw data
            drawPolylineLines(plt,data,deco)
        else:
            print '... do not know how to draw this type: ' + what['type']
    else:
        print '... do not know how to draw this format: ' + type
def getHeaderJCOPE2(self, bounds):
    """Build the 2D and 3D SELAFIN headers from the JCOPE2 model grid.

    bounds -- [[lat_min, lon_min], [lat_max, lon_max]] window used to
              subset the JCOPE2 regular grid (note: index 0 is latitude,
              index 1 is longitude)

    Creates self.slf3d / self.slf2d (empty SELAFIN objects), fills in
    variable names/units, mesh sizes, prism connectivity (IKLE), boundary
    numbering (IPOBO) and node coordinates, masking out grid cells whose
    'el' value is -99 (land / no data).
    Reads from self.experiments[0][0]; sets self.jcope2ilon/self.jcope2ilat,
    self.ZPLAN, self.MASK2/self.MASK3 along the way.
    """
    # ~~> inheritence
    self.slf3d = SELAFIN('')  # slf3d
    self.slf2d = SELAFIN('')  # slf2d surface
    print ' +> Set SELAFIN Variables'
    self.slf3d.TITLE = ''
    self.slf3d.NBV1 = 6
    self.slf3d.NVAR = 6
    self.slf3d.VARINDEX = range(self.slf3d.NVAR)
    # SELAFIN variable names/units are fixed-width 16-character fields.
    self.slf3d.VARNAMES = ['ELEVATION Z ', \
        'SALINITY ','TEMPERATURE ', \
        'VELOCITY U ','VELOCITY V ','VELOCITY W ']
    self.slf3d.VARUNITS = ['M ', \
        ' ',' ', \
        'M/S ','M/S ','M/S ']
    # The 2D (surface) file carries the same variables minus VELOCITY W.
    self.slf2d.TITLE = self.slf3d.TITLE
    self.slf2d.NBV1 = self.slf3d.NBV1 - 1
    self.slf2d.NVAR = self.slf2d.NBV1
    self.slf2d.VARINDEX = range(self.slf2d.NVAR)
    self.slf2d.VARNAMES = self.slf3d.VARNAMES[0:-1]
    self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1]
    # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # ~~> the whole of the 2D grid sizes
    print ' +> Extract JCOPE2 sizes'
    # /!\ 't' gives me access to NPLAN in 3D
    jcope2data = self.experiments[0][0]['temp']
    NX1D = jcope2data['lon'].shape[0]
    NY1D = jcope2data['lat'].shape[0]
    print ' +> Extract JCOPE2 mesh'
    lonX1D = jcope2data['lon'].data[0:NX1D]
    latY1D = jcope2data['lat'].data[0:NY1D]
    # ~~> no correction for lat,lon
    # ~~> subset for the SELAFIN: indices of grid columns/rows inside bounds
    print ' +> Set SELAFIN mesh'
    self.jcope2ilon = np.where(
        (lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0]
    self.jcope2ilat = np.where(
        (latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0]
    x = lonX1D[self.jcope2ilon]
    y = latY1D[self.jcope2ilat]
    # From here on NX1D/NY1D are the *subset* sizes.
    NX1D = len(x)
    NY1D = len(y)
    # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print ' +> Set SELAFIN sizes'
    # ~~> 3D: prisms (6 nodes) stacked between NPLAN horizontal planes
    self.slf3d.NPLAN = jcope2data['lev'].shape[0]
    # Levels reversed so planes are ordered bottom-up for SELAFIN.
    self.ZPLAN = jcope2data['lev'][
        0:self.slf3d.NPLAN][::-1]  # I do not know any other way
    self.slf3d.NDP2 = 3
    self.slf3d.NDP3 = 6
    self.slf3d.NPOIN2 = NX1D * NY1D
    self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
    self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
    self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
    self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0]
    # ~~> 2D: a single plane of triangles
    self.slf2d.NPLAN = 1
    self.slf2d.NDP2 = self.slf3d.NDP2
    self.slf2d.NDP3 = self.slf2d.NDP2
    self.slf2d.NPOIN2 = self.slf3d.NPOIN2
    self.slf2d.NPOIN3 = self.slf2d.NPOIN2
    self.slf2d.NELEM2 = self.slf3d.NELEM2
    self.slf2d.NELEM3 = self.slf2d.NELEM2
    self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
    # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Each quad of the regular grid is split into two prisms per layer.
    print ' +> Set the default SELAFIN IKLE 3D'
    ielem = 0
    pbar = ProgressBar(maxval=self.slf3d.NELEM3).start()
    self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3),
                                dtype=np.int)
    for k in range(1, self.slf3d.NPLAN):
        for i in range(1, NX1D):
            for j in range(1, NY1D):
                ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2
                # ~~> first prism
                self.slf3d.IKLE3[ielem][0] = ipoin
                self.slf3d.IKLE3[ielem][1] = ipoin + NY1D
                self.slf3d.IKLE3[ielem][2] = ipoin + 1
                self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][
                    4] = ipoin + NY1D + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                ielem = ielem + 1
                pbar.update(ielem)
                # ~~> second prism
                self.slf3d.IKLE3[ielem][0] = ipoin + NY1D
                self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1
                self.slf3d.IKLE3[ielem][2] = ipoin + 1
                self.slf3d.IKLE3[ielem][
                    3] = ipoin + NY1D + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][
                    4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                ielem = ielem + 1
                pbar.update(ielem)
    pbar.finish()
    # 2D triangles = bottom faces (first 3 nodes) of the first layer's prisms.
    self.slf2d.IKLE3 = np.compress(
        [True, True, True, False, False, False],
        self.slf3d.IKLE3[0:self.slf3d.NELEM2],
        axis=1)  #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2))
    # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # Number the outer rim of the regular grid counter-clockwise.
    print ' +> Set SELAFIN IPOBO'
    pbar = ProgressBar(maxval=NX1D + NY1D).start()
    IPOB2 = np.zeros(self.slf3d.NPOIN2, dtype=np.int)
    # ~~> along the x-axis (lon)
    for i in range(NX1D):
        ipoin = i * NY1D
        IPOB2[ipoin] = i + 1
        ipoin = i * NY1D - 1
        IPOB2[ipoin] = 2 * NX1D + (NY1D - 2) - i
        pbar.update(i)
    # ~~> along the y-axis (alt)
    for i in range(1, NY1D):
        ipoin = i
        IPOB2[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1
        ipoin = NY1D * (NX1D - 1) + i
        IPOB2[ipoin] = NX1D + i
        pbar.update(i + NX1D)
    pbar.finish()
    # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # /!\ 'el' gives me access to the real mesh removing elements with -99 values
    print ' +> Mask the non-values from the SELAFIN IKLE'
    jcope2data = self.experiments[0][0]['el']
    var = np.swapaxes(
        jcope2data['el'].data[0, 0,
                              self.jcope2ilat[0]:self.jcope2ilat[-1] + 1,
                              self.jcope2ilon[0]:self.jcope2ilon[-1] + 1][0],
        1, 2).ravel()
    # ~> the elements you wish to keep: all three nodes must be valid (> -99)
    MASK2 = self.slf2d.IKLE3[np.where(
        np.sum(np.in1d(
            self.slf2d.IKLE3,
            np.compress(var > -99, np.arange(len(
                var)))).reshape(self.slf2d.NELEM2, self.slf2d.NDP2),
               axis=1) == 3)]
    self.slf2d.NELEM2 = len(MASK2)
    self.slf2d.NELEM3 = self.slf2d.NELEM2
    self.slf3d.NELEM2 = self.slf2d.NELEM2
    self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
    # ~~> re-numbering IKLE2 as a local connectivity matrix
    # KNOLG: sorted global node numbers kept; KNOGL: global -> local map.
    KNOLG, indices = np.unique(np.ravel(MASK2), return_index=True)
    KNOGL = dict(zip(KNOLG, range(len(KNOLG))))
    self.MASK2 = np.in1d(np.arange(len(var)), KNOLG)
    self.MASK3 = np.tile(self.MASK2, self.slf3d.NPLAN)
    self.slf2d.IKLE2 = -np.ones_like(MASK2, dtype=np.int)
    for k in range(len(MASK2)):
        self.slf2d.IKLE2[k] = [
            KNOGL[MASK2[k][0]], KNOGL[MASK2[k][1]], KNOGL[MASK2[k][2]]
        ]
    self.slf3d.NPOIN2 = len(KNOLG)
    self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
    self.slf2d.NPOIN2 = self.slf3d.NPOIN2
    self.slf2d.NPOIN3 = self.slf2d.NPOIN2
    # ~~> re-connecting the upper floors: replicate the 2D triangulation
    # on every layer, offsetting node numbers by NPOIN2 per plane.
    self.slf2d.IKLE3 = self.slf2d.IKLE2
    self.slf3d.IKLE2 = self.slf2d.IKLE2
    self.slf3d.IKLE3 = \
        np.repeat(self.slf2d.NPOIN2*np.arange(self.slf3d.NPLAN-1),self.slf2d.NELEM2*self.slf3d.NDP3).reshape((self.slf2d.NELEM2*(self.slf3d.NPLAN-1),self.slf3d.NDP3)) + \
        np.tile(np.add(np.tile(self.slf2d.IKLE2,2),np.repeat(self.slf2d.NPOIN2*np.arange(2),self.slf3d.NDP2)),(self.slf3d.NPLAN-1,1))
    # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    self.slf2d.IPOB2 = IPOB2[self.MASK2]
    self.slf2d.IPOB3 = self.slf2d.IPOB2
    self.slf3d.IPOB2 = self.slf2d.IPOB2
    self.slf3d.IPOB3 = np.ravel(
        np.add(
            np.repeat(self.slf2d.IPOB2, self.slf3d.NPLAN).reshape(
                (self.slf2d.NPOIN2, self.slf3d.NPLAN)),
            self.slf2d.NPOIN2 * np.arange(self.slf3d.NPLAN)).T)
    # ~~~~ Mesh ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # NOTE(review): the +0.042 offset is unexplained here -- presumably a
    # half-cell shift of the JCOPE2 grid; confirm against the data source.
    print ' +> Set SELAFIN mesh'
    self.slf3d.MESHX = np.tile(x, NY1D).reshape(
        NY1D, NX1D).T.ravel()[self.MASK2] + 0.042
    self.slf3d.MESHY = np.tile(y, NX1D)[self.MASK2] + 0.042
    self.slf2d.MESHX = self.slf3d.MESHX
    self.slf2d.MESHY = self.slf3d.MESHY
A script to map 2D or 3D outter model results into a SELAFIN, onto the one frame of contained SELAFIN file of your choosing (your MESH). ''')) parser.add_argument( "args",default='',nargs=3 ) options = parser.parse_args() # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ slf new mesh ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ geoFile = options.args[0] if not path.exists(geoFile): print '... the provided geoFile does not seem to exist: '+geoFile+'\n\n' sys.exit(1) # Find corresponding (x,y) in corresponding new mesh print ' +> getting hold of the GEO file and of its bathymetry' geo = SELAFIN(geoFile) xys = np.vstack( (geo.MESHX,geo.MESHY) ).T bat = geo.getVariablesAt( 0,subsetVariablesSLF("BOTTOM: ",geo.VARNAMES)[0] )[0] # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ slf existing res ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ slfFile = options.args[1] if not path.exists(slfFile): print '... the provided geoFile does not seem to exist: '+slfFile+'\n\n' sys.exit(1) slf = SELAFIN(slfFile) slf.setKDTree() slf.setMPLTri() print ' +> support extraction' # Extract triangles and weights in 2D
if i == (len(columns)-1) : csvF.write(str(columns[i][j])+'\n') else : csvF.write(str(columns[i][j])+',') csvF.close() return if __name__ == "__main__": ############################################################################ ##### Importing data ###### ############################################################################ # Times Series from Selafin file # 5113 is the node number # 0,1,2,8 are the variable indexes slf = SELAFIN("sis_foulness.slf") series = slf.getSERIES([5113],[0,1,2,8]) u = series[0][0] v = series[1][0] h = series[2][0] QSsuspension = series[3][0] # Experiment data from CSV file # always write the variable name in lower case csv = CSV() csv.getFileContent('fielddata.csv') t,QSexp = csv.getColumns('qs')
class splitSELAFIN(): def __init__(self, SLFfileName, CLMfileName, SEQfileName='', splitCONLIM=False, DOMfileRoot=''): print '\n... Acquiring global files' # ~~> Acquire global CONLIM file print ' +> CONLIM file' self.clm = CONLIM(CLMfileName) self.isCONLIM = splitCONLIM # ~~> Acquire global SELAFIN file print ' +> SELAFIN file' self.slf = SELAFIN(SLFfileName) # ~~> Acquire global SELAFIN file if SEQfileName != '': print ' +> SEQUENCE file' self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromSequence( np.array(getFileContent(SEQfileName), dtype='<i4')) else: self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromNodeValues( 'PROCESSORS') print '\n... Split by elements in ', self.NPARTS, ' parts\n' # ~~> Clean inconsistencies in boundary segments self.IPOBO, self.NSPLIT, self.KSPLIT = self.setSplitForBoundaries( self.NSPLIT, self.clm.KFRGL, self.KSPLIT) self.PINTER,self.PNHALO,self.PNODDS = \ self.setSplitForElements( self.IPOBO,self.NPARTS,self.NSPLIT,self.KSPLIT ) self.slfn = self.copyCommonData() # ~~> Optional output file names self.isDOMAIN = DOMfileRoot # Make a copy of common information for sub-meshes def copyCommonData(self): SLFn = SELAFIN('') # Meta data SLFn.TITLE = self.slf.TITLE SLFn.file = self.slf.file SLFn.IPARAM = self.slf.IPARAM # Time SLFn.DATETIME = self.slf.DATETIME SLFn.tags = self.slf.tags # Variables SLFn.NBV1 = self.slf.NBV1 SLFn.VARNAMES = self.slf.VARNAMES SLFn.VARUNITS = self.slf.VARUNITS SLFn.NBV2 = self.slf.NBV2 SLFn.CLDNAMES = self.slf.CLDNAMES SLFn.CLDUNITS = self.slf.CLDUNITS SLFn.NVAR = self.slf.NVAR SLFn.VARINDEX = range(self.slf.NVAR) # Unchanged numbers SLFn.NPLAN = self.slf.NPLAN SLFn.NDP = self.slf.NDP return SLFn # Split based on a sequence of parts, one for each element (result from METIS) def getSplitFromSequence(self, KSPLIT): # ~~> NPARTS is the number of parts /!\ does not check continuity vs. 
missing parts NPARTS = max(*KSPLIT) NSPLIT = np.zeros(self.slf.NPOIN3, dtype=np.int) for part in range(NPARTS): k = np.compress(KSPLIT == (part + 1), range(len(self.slf.IKLE))) NSPLIT[self.slf.IKLE[k]] = KSPLIT[k] return NPARTS, NSPLIT - 1, KSPLIT - 1 # Split based on the variable PROCESSORS, defined at the nodes def getSplitFromNodeValues(self, var): # ~~> Filter for 'PROCESSORS' as input to the getVariablesAt method i, vn = subsetVariablesSLF(var, self.slf.VARNAMES) if i == []: print '... Could not find ', var, ', you may need another split method' sys.exit() # ~~> NSPLIT is the interger value of the variable PROCESSORS (time frame 0) NSPLIT = np.array( \ getVariablesAt( self.slf.file,self.slf.tags,0,self.slf.NVAR,self.slf.NPOIN3,i )[0], \ dtype=np.int) # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts NPARTS = max(*NSPLIT) + 1 # User numbering NSPLIT starts from 0 KSPLIT = np.minimum(*(NSPLIT[self.slf.IKLE].T)) return NPARTS, NSPLIT, KSPLIT def setSplitForBoundaries(self, NSPLIT, KFRGL, KSPLIT): # ~~> Join up the global boundary nodes with the halo elements IPOBO = np.zeros(self.slf.NPOIN3, dtype=np.int) IPOBO[KFRGL.keys()] = np.array( KFRGL.values(), dtype=np.int) + 1 # this is so the nonzero search is easier # ~~> Cross check partition quality -- step 1 found = True nloop = 0 while found: found = False nloop += 1 for k in range(len(self.slf.IKLE)): e = self.slf.IKLE[k] if KSPLIT[k] != max(NSPLIT[e]): for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]): if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[ e[p2]] != KSPLIT[k]: if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0: print ' ~> correcting boundary segment at iteration: ', nloop, ( e[p1], e[p2]), k, KSPLIT[k], e, NSPLIT[e] NSPLIT[e[p1]] = NSPLIT[e[p3]] NSPLIT[e[p2]] = NSPLIT[e[p3]] KSPLIT[k] = NSPLIT[e[p3]] found = True # ~~> Cross check partition quality -- step 2 found = True nloop = 0 while found: found = False nloop += 1 for k in range(len(self.slf.IKLE)): e = self.slf.IKLE[k] if 
min(NSPLIT[e]) != max(NSPLIT[e]) and KSPLIT[k] != min( NSPLIT[e]): print ' ~> correcting internal segment at iteration: ', nloop, k, KSPLIT[ k], e, NSPLIT[e] KSPLIT[k] = min(NSPLIT[e]) found = True return IPOBO, NSPLIT, KSPLIT # Split based on the variable PROCESSORS, defined at the nodes def setSplitForElements(self, IPOBO, NPARTS, NSPLIT, KSPLIT): SNHALO = dict([(i, []) for i in range(NPARTS)]) PNODDS = dict([(i, []) for i in range(NPARTS)]) SINTER = dict([(i, []) for i in range(NPARTS)]) # ~~> Internal segments separating parts pbar = ProgressBar(maxval=len(self.slf.IKLE)).start() for k in range(len(self.slf.IKLE)): e = self.slf.IKLE[k] # Case 1: you are at an internal boundary element if KSPLIT[k] != max(NSPLIT[e]): for p1, p2 in zip([0, 1, 2], [1, 2, 0]): if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[ e[p2]] != KSPLIT[k]: SINTER[KSPLIT[k]].append((e[p1], e[p2])) SINTER[min(NSPLIT[e[p1]], NSPLIT[e[p2]])].append( (e[p2], e[p1])) # Case 2: you may be at an external boundary element if np.count_nonzero(IPOBO[e]) > 1: for p1, p2 in zip([0, 1, 2], [1, 2, 0]): if IPOBO[e[p1]] != 0 and IPOBO[ e[p2]] != 0: # multiplier is not possible if IPOBO[e[p1]] + 1 == IPOBO[e[p2]]: SNHALO[KSPLIT[k]].append((e[p1], e[p2])) else: PNODDS[KSPLIT[k]].append([e[p1], e[p2]]) pbar.update(k) pbar.finish() # ~~> Clean-up of funny segments looping on themselves for part in range(NPARTS): # ~~> Quickly checking through to remove duplicate segments found = True while found: found = False INTER = np.array(SINTER[part], dtype=[('h', int), ('t', int)]) HEADT = np.argsort(INTER['h']) HLINK = np.searchsorted(INTER['h'][HEADT], INTER['t'][HEADT]) w = 0 while w < len(HLINK): if HLINK[w] < len(HLINK): if INTER['h'][HEADT[w]] == INTER['t'][HEADT[ HLINK[w]]] and INTER['t'][ HEADT[w]] == INTER['h'][HEADT[HLINK[w]]]: print ' ~> Removing dupicate segments in part: ', part, SINTER[ part][HEADT[w]], SINTER[part][HEADT[HLINK[w]]] if HEADT[w] > HEADT[HLINK[w]]: SINTER[part].pop(HEADT[w]) 
SINTER[part].pop(HEADT[HLINK[w]]) else: SINTER[part].pop(HEADT[HLINK[w]]) SINTER[part].pop(HEADT[w]) found = True break w += 1 return SINTER, SNHALO, PNODDS def getIKLE(self, npart): # ~~> get IKLE for that part ... still with global element numbers GIKLE = np.compress(self.KSPLIT == npart, self.slf.IKLE, axis=0) KELLG = np.compress(self.KSPLIT == npart, range(len(self.slf.IKLE)), axis=0) # ~~> KNOLG(NPOIN3) gives the global node number such that # for i = 1,NPOIN3: Fwrite(i) = Fread(KNOLG(i)) and is ordered KNOLG, indices = np.unique(np.ravel(GIKLE), return_index=True) KNOGL = dict(zip(KNOLG, range(len(KNOLG)))) LIKLE = -np.ones_like(GIKLE, dtype=np.int) pbar = ProgressBar(maxval=len(GIKLE)).start() for k in range(len(GIKLE)): LIKLE[k] = [ KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]] ] pbar.update(k) pbar.finish() return LIKLE, KELLG, KNOLG def resetPartition(self, part, PINTER, KSPLIT): MASKER = np.zeros(self.slf.NPOIN3, dtype=np.int) for p in PINTER: MASKER[p] = np.arange(len(p)) + 1 # PINTER is ordered KIKLE = np.compress( np.maximum(*(MASKER[self.slf.IKLE].T)) >= 0, range(len(self.slf.IKLE))) #KIKLE = np.compress(np.count_nonzero(MASKER[self.slf.IKLE],axis=1)>2,range(len(self.slf.IKLE))) # /!\ does not work ? 
pbar = ProgressBar(maxval=len(KIKLE)).start() for k in KIKLE: e = self.slf.IKLE[k] if np.count_nonzero(MASKER[e]) < 2 or KSPLIT[k] == part: continue for p1, p2 in zip([0, 1, 2], [1, 2, 0]): if MASKER[e[p1]] > 0 and MASKER[e[p2]] > 0 and MASKER[ e[p2]] > MASKER[e[p1]]: print ' ~> Warning for element of part: ', part, '(was:', KSPLIT[ k], ') ', k, e #KSPLIT[k] = part pbar.update(k) pbar.finish() return KSPLIT def joinPairs(self, polyLines): INTER = np.array(polyLines, dtype=[('h', int), ('t', int)]) IDONE = np.ones(len(polyLines), dtype=np.int) polyA = [] polyZ = [] polyL = [] # ~~> Finding the endings HEADT = np.argsort( INTER['h']) # knowing that INTER[HEADT] is sorted by the head HLINK = np.searchsorted( INTER['h'][HEADT], INTER['t'][HEADT]) # INTER['h'][HEADT] is sorted # ... HLINK[w] for w in INTER['t'] gives you the position of INTER['t'][w] in INTER['h'][HEADT] w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT)))) po = INTER['h'][HEADT[w]] pe = INTER['t'][HEADT[w]] IDONE[w] = 0 polyA.append(po) swapMinMax = True while True: if HLINK[w] < len(INTER): if INTER['t'][HEADT][w] == INTER['h'][HEADT][HLINK[w]]: w = HLINK[w] pe = INTER['t'][HEADT][w] IDONE[w] = 0 if pe not in polyA: if HLINK[w] < len(INTER): if INTER['t'][HEADT][w] != po and INTER['t'][HEADT][ w] == INTER['h'][HEADT][HLINK[w]]: continue if po == pe: polyL.append(pe) else: if pe not in polyZ: polyZ.append(pe) else: polyA.append(po) if np.count_nonzero(IDONE) == 0: break if swapMinMax: w = max( np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT)))) else: w = min( np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT)))) swapMinMax = not swapMinMax po = INTER['h'][HEADT[w]] pe = INTER['t'][HEADT[w]] IDONE[w] = 0 polyA.append(po) # ~~> Finding the sources TAILT = np.argsort( INTER['t']) # knowing that INTER[TAILT] is sorted by the tail TLINK = np.searchsorted( INTER['t'][TAILT], INTER['h'][TAILT]) # INTER['h'][HEADT] is sorted # ... 
TLINK[w] for w in polyZ gives you the position of polyZ[w] in INTER['t'][TAILT] polyGones = [] # ~~> Finding the sources of non-looping lines TAILS = np.searchsorted(INTER['t'][TAILT], polyZ) for w in TAILS: p = [INTER['t'][TAILT[w]]] while True: if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]: po = [INTER['h'][TAILT][w]] po.extend(p) p = po w = TLINK[w] if TLINK[w] < len(INTER): if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]: continue po = [INTER['h'][TAILT][w]] po.extend(p) p = po break polyGones.append(p) # ~~> Finding the sources of looping lines LOOPS = np.searchsorted(INTER['t'][TAILT], polyL) for w in LOOPS: p = [INTER['t'][TAILT[w]]] while True: if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]: po = [INTER['h'][TAILT][w]] po.extend(p) p = po w = TLINK[w] if INTER['h'][TAILT][w] != p[len(p) - 1]: continue po = [INTER['h'][TAILT][w]] po.extend(p) p = po break polyGones.append(p) return polyGones def joinSegments(self, polyLines): polyGones = [] maxbar = max(len(polyLines), 1) pbar = ProgressBar(maxval=maxbar).start() while polyLines != []: # ~~> starting point e = polyLines[0] le = len(e) a, b = e[0], e[len(e) - 1] # ~~> case of closed line if a == b: polyGones.append( e[0:len(e)]) # /!\ here you keep the duplicated point polyLines.pop(0) continue # ~~> iterative process for ei, iline in zip(polyLines[1:], range(len(polyLines))[1:]): # ~~> merging the two segments if b == ei[0]: polyLines[0] = e[0:len(e)] # copy ! polyLines[0].extend(ei[1:]) polyLines.pop(iline) break if a == ei[len(ei) - 1]: polyLines[0] = ei[0:len(ei)] # copy ! 
polyLines[0].extend(e[1:]) polyLines.pop(iline) break # ~~> completed search if le == len(polyLines[0]): polyGones.append(e[0:len(e)]) polyLines.pop(0) pbar.update(maxbar - len(polyLines)) pbar.finish() return polyGones def tetrisOddSegments(self, main, odds): polyGones = [] lo = len(odds) while main != []: # ~~> starting point e = main[0] le = len(e) a, b = e[0], e[len(e) - 1] # ~~> case of closed line if a == b: polyGones.append( e[0:len(e)]) # /!\ here you keep the duplicated point main.pop(0) continue # ~~> iterative process for ei, iline in zip(odds, range(len(odds))): # ~~> merging the two segments if b == ei[0]: main[0] = e[0:len(e)] main[0].extend(ei[1:]) odds.pop(iline) break if a == ei[len(ei) - 1]: main[0] = ei[0:len(ei)] main[0].extend(e[1:]) odds.pop(iline) break # ~~> completed search if le == len(main[0]): polyGones.append(e[0:len(e)]) main.pop(0) # ~~> removing the over-constrained elements for p in polyGones: if len(p) > 3: j = 2 while j < len(p): if p[j - 2] == p[j]: p.pop(j - 2) p.pop(j - 2) j += 1 return polyGones # Filter poly according to IPOBO on that part. # ~> gloseg: is the ensemble of either closed islands or # open external boundary segments # Note: filtering now seems to mean that to have done a lot of work for nothing def globalSegments(self, poly): gloseg = [] for p in poly: pA = p[0] pZ = p[len(p) - 1] closed = False if pA == pZ and self.IPOBO[pA] != 0: closed = True iA = 0 iZ = 0 ploseg = [] for i in p: if self.IPOBO[ i] != 0: # moves the counter along for external points iZ += 1 elif iZ != 0: # you have just found the end of an external segment ploseg.append(p[iA:iA + iZ]) iA += iZ + 1 iZ = 0 else: iA += 1 if iZ != 0: if closed and len(ploseg) > 0: i = p[iA:iA + iZ] i.extend(ploseg[0][1:]) # remove duplicate ploseg[0] = i else: ploseg.append(p[iA:iA + iZ]) gloseg.extend(ploseg) return gloseg def putContent(self): # ~~> Extension for parallel file names fmtn = '00000' + str(self.NPARTS - 1) fmtn = fmtn[len(fmtn) - 5:] print '\n... 
Split the boundary connectivity' # ~~> Assemble internal and external segments polyCLOSED = dict([(i, []) for i in range(self.NPARTS)]) polyFILTER = dict([(i, []) for i in range(self.NPARTS)]) polyGLOSED = [] for part in range(self.NPARTS): # this could be done in parallel print ' +> Joining up boundary segments for part: ', part + 1 # ~~> Joining up boundaries for sub-domains print ' ~> main internal segments' self.PINTER[part] = self.joinPairs(self.PINTER[part]) print ' ~> main external segments' polyHALO = self.joinPairs(self.PNHALO[part]) polyHALO.extend(self.PINTER[part]) polyHALO = self.joinSegments(polyHALO) print ' ~> odd segments' polyODDS = self.joinSegments(self.PNODDS[part]) print ' ~> stitching with the odd ones' polyGones = self.tetrisOddSegments(polyHALO, polyODDS) print ' ~> final closure' polyCLOSED[part] = self.joinSegments(polyGones) # ~~> Building up the entire picture polyFILTER[part] = self.globalSegments(polyCLOSED[part]) polyGLOSED.extend(polyFILTER[part]) # ~~> Joining up boundaries for the global domain (Note: seems counter productive but is not) polyGLOSED = self.joinSegments(polyGLOSED) if self.isDOMAIN != '': print '\n... 
Printing the domain split into a series of i2s files' # ~~> Convert node numbers into x,y for part in range(self.NPARTS): print ' +> part ', part + 1, ' of ', self.NPARTS polyXY = [] for pg in range(len(polyCLOSED[part])): pxy = [] for pt in range(len(polyCLOSED[part][pg])): n = polyCLOSED[part][pg][pt] pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]]) polyXY.append(pxy) # ~~> Write polygons to double check fmti = '00000' + str(part) fmti = fmti[len(fmti) - 5:] fileName = path.join( path.dirname(self.slf.fileName), self.isDOMAIN + fmtn + '-' + fmti + '.i2s') putInS(fileName, [], 'i2s', polyXY) # ~~> Convert node numbers into x,y polyXY = [] for pg in range(len(polyGLOSED)): pxy = [] for pt in range(len(polyGLOSED[pg])): n = polyGLOSED[pg][pt] pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]]) polyXY.append(pxy) # ~~> Write polygons to double check fileName = path.join(path.dirname(self.slf.fileName), self.isDOMAIN + '.i2s') putInS(fileName, [], 'i2s', polyXY) print '\n... Final check to the element partitioning' for part in range(self.NPARTS): # this could be done in parallel self.KSPLIT = self.resetPartition(part, self.PINTER[part], self.KSPLIT) if self.isDOMAIN != '': # ~~> This is optional print '\n... Printing the domain split into a SELAFIN' fileRoot, fileExts = path.splitext(self.slf.fileName) self.slf.fole = open(fileRoot + '_PROCS' + fileExts, 'wb') putHeaderSLF(self.slf) appendCoreTimeSLF(self.slf, 0) VARSOR = self.slf.getVALUES(0) for v in range(self.slf.NVAR): VARSOR[v] = self.NSPLIT appendCoreVarsSLF(self.slf, VARSOR) self.slf.fole.close() print '\n... Storing the global liquid boundary numbering (NUMLIQ)' # ~~> Implying NUMLIQ and the number NFRLIQ based on the joined-up lines self.clm.setNUMLIQ(polyGLOSED) print '\n... 
Split the mesh connectivity' # ~~> Preliminary set up for LIKLE, KNOLG and KEMLG by parts LIKLE = dict([(i, []) for i in range(self.NPARTS)]) KELLG = dict([(i, []) for i in range(self.NPARTS)]) KNOLG = dict([(i, []) for i in range(self.NPARTS)]) for part in range(self.NPARTS): print ' +> re-ordering IKLE for part ', part + 1 LIKLE[part], KELLG[part], KNOLG[part] = self.getIKLE(part) # ~~> CONLIM file: Preliminary set up of IFAPAR and ISEG for all parts IFAPAR = dict([(i, {}) for i in range(self.NPARTS)]) ISEG = {} # Organising ISEG for easier call: part 1 for part in range(self.NPARTS): for i in polyFILTER[part]: if i[0] == i[len(i) - 1]: continue # /!\ you are here adding one ! if i[0] in ISEG.keys(): ISEG[i[0]].update({part: i[1] + 1}) else: ISEG.update({i[0]: {part: i[1] + 1}}) if i[len(i) - 1] in ISEG.keys(): ISEG[i[len(i) - 1]].update({part: -i[len(i) - 2] - 1}) else: ISEG.update({i[len(i) - 1]: {part: -i[len(i) - 2] - 1}}) # Switching parts of ISEG for final call: part 2 for i in ISEG.keys(): if len(ISEG[i]) != 2: print '... You have a boundary node surounded with more than two boundary segments: ', i sys.exit() parts = ISEG[i].keys() ISEG[i] = { parts[0]: ISEG[i][parts[1]], parts[1]: ISEG[i][parts[0]] } # ~~> CONLIM file: Preliminary set up of NPTIR for all parts NPTIR = dict([(i, {}) for i in range(self.NPARTS)]) for part in range(self.NPARTS): for p in self.PINTER[part]: NPTIR[part].update(dict([(i, []) for i in p])) parts = range(self.NPARTS) while parts != []: part = parts[0] parts.pop(0) for ip in NPTIR[part].keys(): for ipart in parts: if ip in NPTIR[ipart].keys(): NPTIR[part][ip].append(ipart) NPTIR[ipart][ip].append(part) print '... 
Split of the SELAFIN file' for part in range(self.NPARTS): fmti = '00000' + str(part) fmti = fmti[len(fmti) - 5:] print ' +> part ', part + 1, ' of ', self.NPARTS self.slfn.IKLE = LIKLE[part] self.slfn.NELEM3 = len(LIKLE[part]) self.slfn.NPOIN3 = len(KNOLG[part]) # ~~> IPARAM has two new values: 8:NPTFR and 9:NPTIR self.slfn.IPARAM[7] = len( np.unique(np.concatenate(polyFILTER[part]))) self.slfn.IPARAM[8] = len(NPTIR[part]) # ~~> IPOBO (or IRAND) converted into KNOLG[part] self.slfn.IPOBO = KNOLG[part] + 1 print ' ~> filtering the MESH' # ~~> GEO file: MESH coordinates self.slfn.MESHX = np.zeros(self.slfn.NPOIN3, dtype=np.float32) self.slfn.MESHY = np.zeros(self.slfn.NPOIN3, dtype=np.float32) self.slfn.MESHX = self.slf.MESHX[KNOLG[part]] self.slfn.MESHY = self.slf.MESHY[KNOLG[part]] # ~~> GEO file: File names fileRoot, fileExts = path.splitext(self.slf.fileName) self.slfn.fileName = fileRoot + fmtn + '-' + fmti + fileExts # ~~> GEO file: Printing print ' ~> printing: ', self.slfn.fileName self.slfn.fole = open(self.slfn.fileName, 'wb') putHeaderSLF(self.slfn) LVARSOR = np.zeros((self.slfn.NVAR, self.slfn.NPOIN3), dtype=np.float32) for t in range(len(self.slf.tags['times'])): appendCoreTimeSLF(self.slfn, t) VARSOR = self.slf.getVALUES(t) for v in range(self.slfn.NVAR): LVARSOR[v] = VARSOR[v][KNOLG[part]] appendCoreVarsSLF(self.slfn, LVARSOR) self.slfn.fole.close() if not self.isCONLIM: return print '\n... 
Connect elements across internal boundaries (IFAPAR)' for part in range(self.NPARTS): print ' +> part ', part + 1, ' of ', self.NPARTS # ~~> CONLIM file: Preliminary set up of PEHALO elements accross internal boundaries PEHALO = {} SEHALO = {} # Step 1: find out about the primary elements and loop through IKLE self.NSPLIT *= 0 MASKER = NPTIR[part].keys() self.NSPLIT[MASKER] += 1 print ' ~> Assembling primary elements with other side' # Sub Step 1: Assembling all edges from the other sides maxbar = 0 ibar = 0 for ip in range(self.NPARTS): maxbar += len(LIKLE[ip]) pbar = ProgressBar(maxval=maxbar).start() for otherpart in range(self.NPARTS): if otherpart == part: continue # all parts are still positive at this stage for k in range(len(LIKLE[otherpart])): ibar += 1 e = self.slf.IKLE[KELLG[otherpart][k]] if np.count_nonzero(self.NSPLIT[e]) < 2: continue for p1, p2 in zip([1, 2, 0], [ 0, 1, 2 ]): # reverse order because looking from the other side if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0: if not PEHALO.has_key((e[p1], e[p2])): PEHALO.update({(e[p1], e[p2]): [0, []]}) PEHALO[(e[p1], e[p2])][1].append(k) PEHALO[(e[p1], e[p2])][1].append(otherpart) pbar.update(ibar) # Sub Step 2: Assembling all edges from the primary side (there are three times more of them) for k in range(len(LIKLE[part])): ibar += 1 j = KELLG[part][k] e = self.slf.IKLE[j] if np.count_nonzero(self.NSPLIT[e]) < 2: continue for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]): if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0: if PEHALO.has_key( (e[p1], e[p2])): # the good side opposes the dark side PEHALO[(e[p1], e[p2])][0] = k if self.NSPLIT[e[p3]] == 0: self.NSPLIT[e[p3]] = -1 if self.NSPLIT[e[p3]] == -1: if not SEHALO.has_key((e[p1], e[p3])): SEHALO.update({(e[p1], e[p3]): []}) SEHALO[(e[p1], e[p3])].append(k) if not SEHALO.has_key((e[p2], e[p3])): SEHALO.update({(e[p2], e[p3]): []}) SEHALO[(e[p2], e[p3])].append(k) else: # self.NSPLIT[e[p3]] must be 2 ! 
if not SEHALO.has_key((e[p3], e[p1])): SEHALO.update({(e[p3], e[p1]): []}) if k not in SEHALO[(e[p3], e[p1])]: SEHALO[(e[p3], e[p1])].append(k) if not SEHALO.has_key((e[p2], e[p3])): SEHALO.update({(e[p2], e[p3]): []}) if k not in SEHALO[(e[p2], e[p3])]: SEHALO[(e[p2], e[p3])].append(k) if self.KSPLIT[j] >= 0: self.KSPLIT[j] = -( self.KSPLIT[j] + 1 ) # /!\ This is very dangerous but necessary pbar.update(ibar) pbar.finish() # Sub Step 3: Final clean up of the other side ? no need but check later for (ei)[0] == 0 # Step 2: find out about the secondary elements on IKLE ( local LIKLE ? ) print ' ~> Assembling secondary elements of that side' pbar = ProgressBar(maxval=len(LIKLE[part])).start() for k in range(len(LIKLE[part])): j = KELLG[part][k] e = self.slf.IKLE[j] if self.KSPLIT[j] != part: continue if np.count_nonzero(self.NSPLIT[e]) < 2: continue for i in [0, 1, 2]: ii = (i + 1) % 3 if self.NSPLIT[e[i]] > 0 and self.NSPLIT[ e[ii]] < 0 and SEHALO.has_key((e[i], e[ii])): SEHALO[(e[i], e[ii])].append(k) # correct orientation if self.NSPLIT[e[i]] > 0 and self.NSPLIT[ e[ii]] > 0 and SEHALO.has_key((e[ii], e[i])): SEHALO[(e[ii], e[i])].append(k) # opposite orientation ii = (i + 2) % 3 if self.NSPLIT[e[i]] > 0 and self.NSPLIT[ e[ii]] < 0 and SEHALO.has_key((e[i], e[ii])): SEHALO[(e[i], e[ii])].append(k) # correct orientation if self.NSPLIT[e[i]] > 0 and self.NSPLIT[ e[ii]] > 0 and SEHALO.has_key((e[i], e[ii])): SEHALO[(e[i], e[ii])].append(k) # opposite orientation if self.KSPLIT[j] < 0: self.KSPLIT[ j] = -self.KSPLIT[j] - 1 # /!\ back to a safe place pbar.update(k) pbar.finish() # Step 3: finally cross reference information between SEHALO and PEHALO print ' ~> Combining sides surrounding the halo-elements' for ie in PEHALO.keys(): if PEHALO[ie][0] == 0: continue k = PEHALO[ie][0] # element number in its local part numbering if not IFAPAR[part].has_key(k): IFAPAR[part].update({k: [-2, -1, -2, -1, -2, -1]}) j = KELLG[part][k] e = self.slf.IKLE[j] for p1, p2 in zip([0, 1, 
2], [1, 2, 0]): if SEHALO.has_key((e[p1], e[p2])): if len(SEHALO[(e[p1], e[p2])]) > 1: if SEHALO[(e[p1], e[p2])][0] == k: IFAPAR[part][k][2 * p1] = SEHALO[(e[p1], e[p2])][1] if SEHALO[(e[p1], e[p2])][1] == k: IFAPAR[part][k][2 * p1] = SEHALO[(e[p1], e[p2])][0] IFAPAR[part][k][1 + 2 * p1] = part if SEHALO.has_key((e[p2], e[p1])): if len(SEHALO[(e[p2], e[p1])]) > 1: if SEHALO[(e[p2], e[p1])][0] == k: IFAPAR[part][k][2 * p1] = SEHALO[(e[p2], e[p1])][1] if SEHALO[(e[p2], e[p1])][1] == k: IFAPAR[part][k][2 * p1] = SEHALO[(e[p2], e[p1])][0] IFAPAR[part][k][1 + 2 * p1] = part if ie == (e[p1], e[p2]): IFAPAR[part][k][2 * p1] = PEHALO[ie][1][0] IFAPAR[part][k][1 + 2 * p1] = PEHALO[ie][1][1] # ~~> CONLIM file: Write to file ... pfuuuuuh ... this is it ! print '\n... Split of the CONLIM files' for part in range(self.NPARTS): fmti = '00000' + str(part) fmti = fmti[len(fmti) - 5:] print ' +> part: ', part + 1, ' of ', self.NPARTS # ~~> CONLIM file: Set the filter INDEX = np.zeros_like(self.clm.INDEX, dtype=np.int) for contour in polyFILTER[part]: # ~~> Closed contour: no need to change ISEG if contour[0] == contour[len(contour) - 1]: for c in contour[1:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1 # ~~> Open contour: need to change ISEG with neighbours else: for c in contour[0:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1 iA = self.clm.KFRGL[contour[0]] self.clm.POR['is'][iA] = ISEG[contour[0]][part] self.clm.POR['xs'][iA] = self.slf.MESHX[abs( ISEG[contour[0]][part]) - 1] # /!\ MESHX start at 0 self.clm.POR['ys'][iA] = self.slf.MESHY[abs( ISEG[contour[0]][part]) - 1] # /!\ MESHY start at 0 iA = self.clm.KFRGL[contour[len(contour) - 1]] self.clm.POR['is'][iA] = ISEG[contour[len(contour) - 1]][part] self.clm.POR['xs'][iA] = self.slf.MESHX[ abs(ISEG[contour[len(contour) - 1]][part]) - 1] self.clm.POR['ys'][iA] = self.slf.MESHY[ abs(ISEG[contour[len(contour) - 1]][part]) - 1] self.clm.INDEX = INDEX # ~~> CONLIM file: Set the NPTIR and CUTs self.clm.NPTIR = NPTIR[part] 
# ~~> CONLIM file: Set the IFAPAR self.clm.IFAPAR = IFAPAR[part] # ~~> CONLIM file fileRoot, fileExts = path.splitext(self.clm.fileName) print ' ~> printing: ', fileRoot + fmtn + '-' + fmti + fileExts self.clm.putContent(fileRoot + fmtn + '-' + fmti + fileExts) return
def main1(self):
    """Load a SELAFIN result file, locate the requested time step and the
    velocity variable pair, then launch the extraction worker (threaded or
    synchronous).

    Reads self.donnees_d_entree (input-settings dict, French keys) and
    mutates it in place: adds "champs", "paramvalueX"/"paramvalueY",
    "mesh", "x", "y" and "ztri".

    Raises GeoAlgorithmExecutionException when the requested time step or
    the velocity variables are not found, or when the output shapefile is
    already locked.
    """
    progress.setPercentage(0)
    progress.setText(str(ctime()) + " - Initialisation - Debut du script")
    # Load the .res (SELAFIN) results file
    slf = SELAFIN(self.donnees_d_entree["pathselafin"])
    # Find the time frame to process: scan every record and keep the VALUES
    # of the last record whose time matches the requested one.
    test = False
    for i, time in enumerate(slf.tags["times"]):
        progress.setText(
            str(ctime())
            + " - Initialisation - Temps present dans le fichier : "
            + str(np.float64(time))
        )
        # print str(i) +" "+ str(time) + str(type(time))
        if float(time) == float(self.donnees_d_entree["temps"]):
            test = True
            values = slf.getVALUES(i)  # NOTE: `values` is only bound when a time matches
    if test:
        progress.setText(
            str(ctime())
            + " - Initialisation - Temps traite : "
            + str(np.float64(self.donnees_d_entree["temps"]))
        )
    else:
        # No matching record: abort before `values` would be used below.
        raise GeoAlgorithmExecutionException(
            str(ctime()) + " - Initialisation - Erreur : \ Temps non trouve"
        )
    # Find the variables to process; also build the QGIS field list.
    test = [False, False]
    tabparam = []
    # NOTE(review): bare `donnees_d_entree` here while the rest of the method
    # uses `self.donnees_d_entree` — presumably a module-level alias of the
    # same dict; confirm, otherwise this is a NameError/aliasing bug.
    donnees_d_entree["champs"] = QgsFields()
    for i, name in enumerate(slf.VARNAMES):
        progress.setText(str(ctime()) + " - Initialisation - Variable dans le fichier res : " + name.strip())
        tabparam.append([i, name.strip()])
        # Field name is the variable name stripped of characters invalid in shapefiles
        donnees_d_entree["champs"].append(QgsField(str(name.strip()).translate(None, "?,!.;"), QVariant.Double))
        if self.donnees_d_entree["Parametre_vitesse_X"] != None:
            if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_X"].strip():
                test[0] = True
                self.donnees_d_entree["paramvalueX"] = i
            if str(name).strip() == self.donnees_d_entree["Parametre_vitesse_Y"].strip():
                test[1] = True
                self.donnees_d_entree["paramvalueY"] = i
        else:
            self.donnees_d_entree["paramvalueX"] = None
            self.donnees_d_entree["paramvalueY"] = None
    if self.donnees_d_entree["Parametre_vitesse_X"] != None:
        if test == [True, True]:
            progress.setText(
                str(ctime())
                + " - Initialisation - Parametre trouvee : "
                + str(tabparam[self.donnees_d_entree["paramvalueX"]][1]).strip()
                + " "
                + str(tabparam[self.donnees_d_entree["paramvalueY"]][1]).strip()
            )
        else:
            raise GeoAlgorithmExecutionException(
                str(ctime()) + " - Initialisation - Erreur : \ Parametre vitesse non trouve"
            )
    # Load the mesh topology from the .res file
    self.donnees_d_entree["mesh"] = np.array(slf.IKLE3)
    self.donnees_d_entree["x"] = slf.MESHX
    self.donnees_d_entree["y"] = slf.MESHY
    # Check that the output shapefile is not already loaded/locked
    if isFileLocked(self.donnees_d_entree["pathshp"], True):
        raise GeoAlgorithmExecutionException(
            str(ctime()) + " - Initialisation - Erreur :\ Fichier shape deja charge !!"
        )
    # Load the data: one value array per variable, for the selected time step
    self.donnees_d_entree["ztri"] = []
    for i in range(len(tabparam)):
        self.donnees_d_entree["ztri"].append(values[i])
    # Launch the worker thread (or run synchronously when
    # "traitementarriereplan" != 0).
    # NOTE(review): bare `donnees_d_entree` again — see note above.
    self.worker = Worker(donnees_d_entree)
    if donnees_d_entree["traitementarriereplan"] == 0:
        self.worker.moveToThread(self.thread)
        self.thread.started.connect(self.worker.run)
        self.worker.progress.connect(progress.setPercentage)
        self.worker.status.connect(progress.setText)
        self.worker.finished.connect(workerFinished)
        self.worker.finished.connect(self.worker.deleteLater)
        self.thread.finished.connect(self.thread.deleteLater)
        self.worker.finished.connect(self.thread.quit)
        champ = QgsFields()
        # NOTE(review): `writercontour` is never used again in this method —
        # presumably constructing the VectorWriter creates the output layer
        # as a side effect; confirm before removing.
        writercontour = VectorWriter(
            self.donnees_d_entree["fichierdesortie_point"],
            None,
            champ,
            QGis.WKBMultiPoint,
            QgsCoordinateReferenceSystem(str(self.donnees_d_entree["crs"])),
        )
        self.thread.start()
    else:
        self.worker.run()
def getHeaderHYCOM(self, bounds):
    """Build the 3D and 2D SELAFIN headers from the remote HYCOM grid.

    Sets up self.slf3d / self.slf2d: variable names and units, mesh sizes,
    node coordinates, prism/triangle connectivity (IKLE3) and boundary node
    numbering (IPOB3), subset to the window `bounds`.

    bounds -- assumed [[lat_min, lon_min], [lat_max, lon_max]], inferred
    from the comparisons against bounds[i][0]/[1] below — TODO confirm
    against callers.
    """
    # ~~> inheritence
    self.slf3d = SELAFIN('')  # 3D file
    self.slf2d = SELAFIN('')  # 2D surface file
    print ' +> Set SELAFIN Variables'
    self.slf3d.TITLE = ''
    self.slf3d.NBV1 = 6
    self.slf3d.NVAR = 6
    self.slf3d.VARINDEX = range(self.slf3d.NVAR)
    # NOTE(review): SELAFIN variable names/units are normally padded to 16
    # characters — padding may have been lost in this copy; verify.
    self.slf3d.VARNAMES = ['ELEVATION Z ', \
        'SALINITY ', 'TEMPERATURE ', \
        'VELOCITY U ', 'VELOCITY V ', 'VELOCITY W ']
    self.slf3d.VARUNITS = ['M ', \
        'G/L ', 'DEGREES ', \
        'M/S ', 'M/S ', 'M/S ']
    # The 2D surface file carries the same variables (minus W) plus EMP and QTOT
    self.slf2d.TITLE = self.slf3d.TITLE
    self.slf2d.NBV1 = self.slf3d.NBV1 + 1
    self.slf2d.NVAR = self.slf3d.NVAR + 1
    self.slf2d.VARINDEX = range(self.slf2d.NVAR)
    self.slf2d.VARNAMES = self.slf3d.VARNAMES[0:-1]
    self.slf2d.VARNAMES.append('EMP ')
    self.slf2d.VARNAMES.append('QTOT ')
    self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1]
    self.slf2d.VARUNITS.append('??? ')
    self.slf2d.VARUNITS.append('??? ')
    # ~~> server access: get the grid and header from the latest experiment
    self.hycomdata = self.experiments[0][0]
    # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # NOTE(review): bare `except:` retry-forever loop around a remote
    # (OPeNDAP) read — deliberate best-effort, but it also swallows
    # KeyboardInterrupt; consider narrowing the exception.
    success = False
    while not success:
        try:
            success = True
            # ~~> the whole of the 2D grid sizes
            print ' +> Extract HYCOM sizes'
            NX1D = self.hycomdata['X'].shape[0]
            NY1D = self.hycomdata['Y'].shape[0]
            print ' +> Extract HYCOM mesh'
            # Longitudes normalised to [0, 360)
            lonX1D = self.hycomdata['Longitude']['Longitude'].data[0, 0:NX1D].ravel() % 360
            latY1D = self.hycomdata['Latitude']['Latitude'].data[0:NY1D, 0].ravel()
        except:
            success = False
            print ' ... re-attempting '
    # ~~> lat,lon correction: wrap longitudes back into (-180, 180]
    for i in range(NX1D):
        if (lonX1D[i] > 180):
            lonX1D[i] = lonX1D[i] - 360.0
    # NOTE(review): hard-coded patch of latitudes above index 2172 to a
    # uniform 1/18-degree spacing — specific to one HYCOM grid layout;
    # confirm still valid for the grids in use.
    for i in range(2172, NY1D):
        latY1D[i] = 47.0 + (i - 2172) / 18.0
    # ~~> subset for the SELAFIN: indices of grid columns/rows inside `bounds`
    print ' +> Set SELAFIN mesh'
    self.hycomilon = np.where((lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0]
    self.hycomilat = np.where((latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0]
    x = lonX1D[self.hycomilon]
    y = latY1D[self.hycomilat]
    NX1D = len(x)
    NY1D = len(y)
    # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    # ~~> 3D: number of planes from the remote Depth axis (retried as above)
    success = False
    while not success:
        try:
            success = True
            print ' +> Set SELAFIN sizes'
            self.slf3d.NPLAN = self.hycomdata['Depth'].shape[0]
            # Depths reversed so planes go from bottom to surface
            self.ZPLAN = self.hycomdata['Depth'][0:self.slf3d.NPLAN][::-1]  # I do not know any other way
        except:
            success = False
            print ' ... re-attempting '
    self.slf3d.NDP2 = 3  # triangles in 2D
    self.slf3d.NDP3 = 6  # prisms in 3D
    self.slf3d.NPOIN2 = NX1D * NY1D
    self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
    self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)  # two triangles per grid cell
    self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
    self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0]
    # ~~> 2D: one plane, same horizontal mesh
    self.slf2d.NPLAN = 1
    self.slf2d.NDP2 = self.slf3d.NDP2
    self.slf2d.NDP3 = self.slf2d.NDP2
    self.slf2d.NPOIN2 = self.slf3d.NPOIN2
    self.slf2d.NPOIN3 = self.slf2d.NPOIN2
    self.slf2d.NELEM2 = self.slf3d.NELEM2
    self.slf2d.NELEM3 = self.slf2d.NELEM2
    self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
    print ' +> Set SELAFIN mesh'
    # Structured grid flattened with x varying slowest (column-major over lon)
    self.slf3d.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel()
    self.slf3d.MESHY = np.tile(y, NX1D)
    self.slf2d.MESHX = self.slf3d.MESHX[0:self.slf2d.NPOIN2]
    self.slf2d.MESHY = self.slf3d.MESHY[0:self.slf2d.NPOIN2]
    # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print ' +> Set SELAFIN IKLE'
    ielem = 0
    pbar = ProgressBar(maxval=self.slf3d.NELEM3).start()
    # NOTE(review): np.int is removed in NumPy >= 1.24; use int/np.int64 when porting.
    self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3), dtype=np.int)
    # Each quad of the structured grid is cut into two prisms per layer
    for k in range(1, self.slf3d.NPLAN):
        for i in range(1, NX1D):
            for j in range(1, NY1D):
                ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2
                # ~~> first prism
                self.slf3d.IKLE3[ielem][0] = ipoin
                self.slf3d.IKLE3[ielem][1] = ipoin + NY1D
                self.slf3d.IKLE3[ielem][2] = ipoin + 1
                self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][4] = ipoin + NY1D + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                ielem = ielem + 1
                pbar.update(ielem)
                # ~~> second prism
                self.slf3d.IKLE3[ielem][0] = ipoin + NY1D
                self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1
                self.slf3d.IKLE3[ielem][2] = ipoin + 1
                self.slf3d.IKLE3[ielem][3] = ipoin + NY1D + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2
                self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                ielem = ielem + 1
                pbar.update(ielem)
    pbar.finish()
    # 2D triangles = bottom faces of the first layer of prisms
    self.slf2d.IKLE3 = np.compress(
        np.repeat([True, False], self.slf2d.NDP2),
        self.slf3d.IKLE3[0:self.slf3d.NELEM2], axis=1)  #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2))
    # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    print ' +> Set SELAFIN IPOBO'
    pbar = ProgressBar(maxval=NX1D + NY1D).start()
    self.slf3d.IPOB3 = np.zeros(self.slf3d.NPOIN3, dtype=np.int)
    # Boundary nodes numbered anticlockwise around the rectangle,
    # one ring of (2*NX1D + 2*NY1D - 4) numbers per plane.
    # ~~> along the x-axis (lon)
    for i in range(NX1D):
        for k in range(1, self.slf3d.NPLAN + 1):
            ipoin = i * NY1D + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            self.slf3d.IPOB3[ipoin] = i + 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            ipoin = i * NY1D - 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            self.slf3d.IPOB3[ipoin] = 2 * NX1D + (NY1D - 2) - i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
        pbar.update(i)
    # ~~> along the y-axis (lat)
    for i in range(1, NY1D):
        for k in range(1, self.slf3d.NPLAN + 1):
            ipoin = i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            self.slf3d.IPOB3[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            ipoin = NY1D * (NX1D - 1) + i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            self.slf3d.IPOB3[ipoin] = NX1D + i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
        pbar.update(i + NX1D)
    pbar.finish()
    # 2D boundary numbering = first plane of the 3D one
    self.slf2d.IPOB3 = self.slf3d.IPOB3[0:self.slf3d.NPOIN2]
class splitSELAFIN():
    """Split a global SELAFIN mesh (and its CONLIM boundary file) into
    NPARTS sub-domain files for parallel TELEMAC runs.

    The element partition comes either from a METIS sequence file or from
    a node variable named 'PROCESSORS' stored in the SELAFIN file itself.

    NOTE(review): this block was recovered from a whitespace-mangled copy;
    statement nesting in joinPairs/joinSegments was reconstructed from the
    token stream and should be checked against the upstream TELEMAC scripts.
    """

    def __init__(self, SLFfileName, CLMfileName, SEQfileName='', splitCONLIM=False, DOMfileRoot=''):
        # SLFfileName -- global SELAFIN mesh/result file
        # CLMfileName -- global CONLIM boundary condition file
        # SEQfileName -- optional METIS element sequence; '' = use 'PROCESSORS' variable
        # splitCONLIM -- whether CONLIM files should also be written
        # DOMfileRoot -- optional root name for i2s domain outline outputs
        print '\n... Acquiring global files'
        # ~~> Acquire global CONLIM file
        print ' +> CONLIM file'
        self.clm = CONLIM(CLMfileName)
        self.isCONLIM = splitCONLIM
        # ~~> Acquire global SELAFIN file
        print ' +> SELAFIN file'
        self.slf = SELAFIN(SLFfileName)
        # ~~> Acquire the element partition (sequence file or node values)
        if SEQfileName != '':
            print ' +> SEQUENCE file'
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromSequence(
                np.array(getFileContent(SEQfileName), dtype='<i4'))
        else:
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromNodeValues('PROCESSORS')
        print '\n... Split by elements in ', self.NPARTS, ' parts\n'
        # ~~> Clean inconsistencies in boundary segments
        self.IPOBO, self.NSPLIT, self.KSPLIT = self.setSplitForBoundaries(
            self.NSPLIT, self.clm.KFRGL, self.KSPLIT)
        self.PINTER, self.PNHALO, self.PNODDS = \
            self.setSplitForElements(self.IPOBO, self.NPARTS, self.NSPLIT, self.KSPLIT)
        self.slfn = self.copyCommonData()
        # ~~> Optional output file names
        self.isDOMAIN = DOMfileRoot

    # Make a copy of common information for sub-meshes
    def copyCommonData(self):
        """Return a new SELAFIN header carrying everything that is shared by
        all sub-domain files (title, time tags, variables, NPLAN, NDP2/3)."""
        SLFn = SELAFIN('')
        # Meta data
        SLFn.TITLE = self.slf.TITLE
        SLFn.file = self.slf.file
        SLFn.IPARAM = self.slf.IPARAM
        # Time
        SLFn.DATETIME = self.slf.DATETIME
        SLFn.tags = self.slf.tags
        # Variables
        SLFn.NBV1 = self.slf.NBV1
        SLFn.VARNAMES = self.slf.VARNAMES
        SLFn.VARUNITS = self.slf.VARUNITS
        SLFn.NBV2 = self.slf.NBV2
        SLFn.CLDNAMES = self.slf.CLDNAMES
        SLFn.CLDUNITS = self.slf.CLDUNITS
        SLFn.NVAR = self.slf.NVAR
        SLFn.VARINDEX = range(self.slf.NVAR)
        # Unchanged numbers
        SLFn.NPLAN = self.slf.NPLAN
        SLFn.NDP2 = self.slf.NDP2
        SLFn.NDP3 = self.slf.NDP3
        return SLFn

    # Split based on a sequence of parts, one for each element (result from METIS)
    def getSplitFromSequence(self, KSPLIT):
        """KSPLIT: 1-based part number per element. Returns (NPARTS,
        NSPLIT-1, KSPLIT-1) — node and element parts converted to 0-based."""
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*KSPLIT)
        NSPLIT = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for part in range(NPARTS):
            # Nodes of every element of this part inherit the part number;
            # later parts overwrite earlier ones on shared (interface) nodes.
            k = np.compress(KSPLIT == (part + 1), range(len(self.slf.IKLE)))
            NSPLIT[self.slf.IKLE[k]] = KSPLIT[k]
        return NPARTS, NSPLIT - 1, KSPLIT - 1

    # Split based on the variable PROCESSORS, defined at the nodes
    def getSplitFromNodeValues(self, var):
        """Derive the partition from a node variable (0-based part numbers).
        An element's part is the minimum of its three nodes' parts."""
        # ~~> Filter for 'PROCESSORS' as input to the getVariablesAt method
        i, vn = subsetVariablesSLF(var, self.slf.VARNAMES)
        if i == []:
            print '... Could not find ', var, ', you may need another split method'
            sys.exit(1)
        # ~~> NSPLIT is the interger value of the variable PROCESSORS (time frame 0)
        NSPLIT = np.array(self.slf.getVariablesAt(0, i)[0], dtype=np.int)
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*NSPLIT) + 1  # User numbering NSPLIT starts from 0
        KSPLIT = np.minimum(*(NSPLIT[self.slf.IKLE].T))
        return NPARTS, NSPLIT, KSPLIT

    def setSplitForBoundaries(self, NSPLIT, KFRGL, KSPLIT):
        """Iron out partition inconsistencies along the external boundary and
        across internal interfaces, iterating until stable.

        Returns (IPOBO, NSPLIT, KSPLIT) where IPOBO[n] is the 1-based global
        boundary rank of node n (0 for internal nodes)."""
        # ~~> Join up the global boundary nodes with the halo elements
        IPOBO = np.zeros(self.slf.NPOIN2, dtype=np.int)
        IPOBO[KFRGL.keys()] = np.array(KFRGL.values(), dtype=np.int) + 1  # this is so the nonzero search is easier
        # ~~> Cross check partition quality -- step 1:
        # a boundary element whose two boundary nodes disagree with it is
        # re-assigned to the part of its third node.
        found = True; nloop = 0
        while found:
            found = False; nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if KSPLIT[k] != max(NSPLIT[e]):
                    for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]):
                        if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                            if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0:
                                print ' ~> correcting boundary segment at iteration: ', nloop, (e[p1], e[p2]), k, KSPLIT[k], e, NSPLIT[e]
                                NSPLIT[e[p1]] = NSPLIT[e[p3]]
                                NSPLIT[e[p2]] = NSPLIT[e[p3]]
                                KSPLIT[k] = NSPLIT[e[p3]]
                                found = True
        # ~~> Cross check partition quality -- step 2:
        # elements straddling parts are forced onto the smallest node part,
        # consistent with the np.minimum rule of getSplitFromNodeValues.
        found = True; nloop = 0
        while found:
            found = False; nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if min(NSPLIT[e]) != max(NSPLIT[e]) and KSPLIT[k] != min(NSPLIT[e]):
                    print ' ~> correcting internal segment at iteration: ', nloop, k, KSPLIT[k], e, NSPLIT[e]
                    KSPLIT[k] = min(NSPLIT[e])
                    found = True
        return IPOBO, NSPLIT, KSPLIT

    # Split based on the variable PROCESSORS, defined at the nodes
    def setSplitForElements(self, IPOBO, NPARTS, NSPLIT, KSPLIT):
        """Collect, per part: SINTER (internal interface segments, both
        orientations), SNHALO (consecutive external boundary segments) and
        PNODDS (odd external segments), then drop duplicated interface pairs."""
        SNHALO = dict([(i, []) for i in range(NPARTS)])
        PNODDS = dict([(i, []) for i in range(NPARTS)])
        SINTER = dict([(i, []) for i in range(NPARTS)])
        # ~~> Internal segments separating parts
        pbar = ProgressBar(maxval=len(self.slf.IKLE)).start()
        for k in range(len(self.slf.IKLE)):
            e = self.slf.IKLE[k]
            # Case 1: you are at an internal boundary element
            if KSPLIT[k] != max(NSPLIT[e]):
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                        # Record the segment once for each side of the interface
                        SINTER[KSPLIT[k]].append((e[p1], e[p2]))
                        SINTER[min(NSPLIT[e[p1]], NSPLIT[e[p2]])].append((e[p2], e[p1]))
            # Case 2: you may be at an external boundary element
            if np.count_nonzero(IPOBO[e]) > 1:
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0:
                        # multiplier is not possible
                        if IPOBO[e[p1]] + 1 == IPOBO[e[p2]]:
                            SNHALO[KSPLIT[k]].append((e[p1], e[p2]))
                        else:
                            PNODDS[KSPLIT[k]].append([e[p1], e[p2]])
            pbar.update(k)
        pbar.finish()
        # ~~> Clean-up of funny segments looping on themselves
        for part in range(NPARTS):
            # ~~> Quickly checking through to remove duplicate segments:
            # a pair (a,b)/(b,a) recorded twice on the same part is dropped.
            found = True
            while found:
                found = False
                INTER = np.array(SINTER[part], dtype=[('h', int), ('t', int)])
                HEADT = np.argsort(INTER['h'])
                HLINK = np.searchsorted(INTER['h'][HEADT], INTER['t'][HEADT])
                w = 0
                while w < len(HLINK):
                    if HLINK[w] < len(HLINK):
                        if INTER['h'][HEADT[w]] == INTER['t'][HEADT[HLINK[w]]] and INTER['t'][HEADT[w]] == INTER['h'][HEADT[HLINK[w]]]:
                            print ' ~> Removing dupicate segments in part: ', part, SINTER[part][HEADT[w]], SINTER[part][HEADT[HLINK[w]]]
                            # Pop in descending index order so positions stay valid
                            if HEADT[w] > HEADT[HLINK[w]]:
                                SINTER[part].pop(HEADT[w])
                                SINTER[part].pop(HEADT[HLINK[w]])
                            else:
                                SINTER[part].pop(HEADT[HLINK[w]])
                                SINTER[part].pop(HEADT[w])
                            found = True
                            break
                    w += 1
        return SINTER, SNHALO, PNODDS

    def getIKLE(self, npart):
        """Return (LIKLE, KELLG, KNOLG) for a part: local connectivity with
        local node numbers, local-to-global element map, and the sorted
        local-to-global node map."""
        # ~~> get IKLE for that part ... still with global element numbers
        GIKLE = np.compress(self.KSPLIT == npart, self.slf.IKLE, axis=0)
        KELLG = np.compress(self.KSPLIT == npart, range(len(self.slf.IKLE)), axis=0)
        # ~~> KNOLG(NPOIN3) gives the global node number such that
        #     for i = 1,NPOIN3: Fwrite(i) = Fread(KNOLG(i)) and is ordered
        KNOLG, indices = np.unique(np.ravel(GIKLE), return_index=True)
        KNOGL = dict(zip(KNOLG, range(len(KNOLG))))  # inverse map: global -> local
        LIKLE = - np.ones_like(GIKLE, dtype=np.int)
        pbar = ProgressBar(maxval=len(GIKLE)).start()
        for k in range(len(GIKLE)):
            LIKLE[k] = [KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]]]
            pbar.update(k)
        pbar.finish()
        return LIKLE, KELLG, KNOLG

    def resetPartition(self, part, PINTER, KSPLIT):
        """Sanity-check elements sitting on this part's interface lines; only
        warns (the re-assignment is commented out) and returns KSPLIT."""
        MASKER = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for p in PINTER:
            MASKER[p] = np.arange(len(p)) + 1  # PINTER is ordered
        KIKLE = np.compress(np.maximum(*(MASKER[self.slf.IKLE].T)) >= 0, range(len(self.slf.IKLE)))
        #KIKLE = np.compress(np.count_nonzero(MASKER[self.slf.IKLE],axis=1)>2,range(len(self.slf.IKLE))) # /!\ does not work ?
        pbar = ProgressBar(maxval=len(KIKLE)).start()
        for k in KIKLE:
            e = self.slf.IKLE[k]
            if np.count_nonzero(MASKER[e]) < 2 or KSPLIT[k] == part:
                continue
            for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                # An edge running along the interface in increasing rank order
                # belongs to this part's side — flag elements that disagree.
                if MASKER[e[p1]] > 0 and MASKER[e[p2]] > 0 and MASKER[e[p2]] > MASKER[e[p1]]:
                    print ' ~> Warning for element of part: ', part, '(was:', KSPLIT[k], ') ', k, e
                    #KSPLIT[k] = part
            pbar.update(k)
        pbar.finish()
        return KSPLIT

    def joinPairs(self, polyLines):
        """Chain individual (head, tail) segment pairs into polylines.

        Returns a list of node-number lists; closed loops keep their start
        node duplicated at the end.

        NOTE(review): loop nesting below reconstructed from a mangled copy —
        verify against the upstream TELEMAC script before refactoring.
        """
        INTER = np.array(polyLines, dtype=[('h', int), ('t', int)])
        IDONE = np.ones(len(polyLines), dtype=np.int)  # 1 = segment not consumed yet
        polyA = []; polyZ = []; polyL = []  # starts, open-line ends, loop ends
        # ~~> Finding the endings
        HEADT = np.argsort(INTER['h'])  # knowing that INTER[HEADT] is sorted by the head
        HLINK = np.searchsorted(INTER['h'][HEADT], INTER['t'][HEADT])  # INTER['h'][HEADT] is sorted
        # ... HLINK[w] for w in INTER['t'] gives you the position of INTER['t'][w] in INTER['h'][HEADT]
        w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
        po = INTER['h'][HEADT[w]]; pe = INTER['t'][HEADT[w]]; IDONE[w] = 0
        polyA.append(po)
        swapMinMax = True
        while True:
            # Walk the chain head -> tail as long as the tail matches another head
            if HLINK[w] < len(INTER):
                if INTER['t'][HEADT][w] == INTER['h'][HEADT][HLINK[w]]:
                    w = HLINK[w]
                    pe = INTER['t'][HEADT][w]; IDONE[w] = 0
                    if pe not in polyA:
                        if HLINK[w] < len(INTER):
                            if INTER['t'][HEADT][w] != po and INTER['t'][HEADT][w] == INTER['h'][HEADT][HLINK[w]]:
                                continue
            # Chain stopped: classify its end point
            if po == pe:
                polyL.append(pe)
            else:
                if pe not in polyZ:
                    polyZ.append(pe)
                else:
                    polyA.append(po)
            if np.count_nonzero(IDONE) == 0:
                break
            # Pick the next unconsumed segment, alternating min/max index
            if swapMinMax:
                w = max(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
            else:
                w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
            swapMinMax = not swapMinMax
            po = INTER['h'][HEADT[w]]; pe = INTER['t'][HEADT[w]]; IDONE[w] = 0
            polyA.append(po)
        # ~~> Finding the sources
        TAILT = np.argsort(INTER['t'])  # knowing that INTER[TAILT] is sorted by the tail
        TLINK = np.searchsorted(INTER['t'][TAILT], INTER['h'][TAILT])  # INTER['h'][HEADT] is sorted
        # ... TLINK[w] for w in polyZ gives you the position of polyZ[w] in INTER['t'][TAILT]
        polyGones = []
        # ~~> Finding the sources of non-looping lines: walk tail -> head
        TAILS = np.searchsorted(INTER['t'][TAILT], polyZ)
        for w in TAILS:
            p = [INTER['t'][TAILT[w]]]
            while True:
                if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                    po = [INTER['h'][TAILT[w]]]
                    po.extend(p)
                    p = po; w = TLINK[w]
                    if TLINK[w] < len(INTER):
                        if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                            continue
                po = [INTER['h'][TAILT[w]]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        # ~~> Finding the sources of looping lines
        LOOPS = np.searchsorted(INTER['t'][TAILT], polyL)
        for w in LOOPS:
            p = [INTER['t'][TAILT[w]]]
            while True:
                if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                    po = [INTER['h'][TAILT[w]]]
                    po.extend(p)
                    p = po; w = TLINK[w]
                    if INTER['h'][TAILT][w] != p[len(p) - 1]:
                        continue
                po = [INTER['h'][TAILT[w]]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        return polyGones

    def joinSegments(self, polyLines):
        """Merge polylines end-to-end until no more joins are possible.
        Consumes polyLines; returns the list of joined polylines (closed
        lines keep the duplicated end point)."""
        polyGones = []
        maxbar = max(len(polyLines), 1)
        pbar = ProgressBar(maxval=maxbar).start()
        while polyLines != []:
            # ~~> starting point
            e = polyLines[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0:len(e)])  # /!\ here you keep the duplicated point
                polyLines.pop(0)
                continue
            # ~~> iterative process
            for ei, iline in zip(polyLines[1:], range(len(polyLines))[1:]):
                # ~~> merging the two segments
                if b == ei[0]:
                    polyLines[0] = e[0:len(e)]  # copy !
                    polyLines[0].extend(ei[1:])
                    polyLines.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    polyLines[0] = ei[0:len(ei)]  # copy !
                    polyLines[0].extend(e[1:])
                    polyLines.pop(iline)
                    break
            # ~~> completed search: nothing merged, line is final
            if le == len(polyLines[0]):
                polyGones.append(e[0:len(e)])
                polyLines.pop(0)
            pbar.update(maxbar - len(polyLines))
        pbar.finish()
        return polyGones

    def tetrisOddSegments(self, main, odds):
        """Stitch the odd segments into the main polylines (same merge logic
        as joinSegments, but candidates come from `odds`), then remove
        back-tracking point pairs. Consumes both inputs."""
        polyGones = []
        lo = len(odds)
        while main != []:
            # ~~> starting point
            e = main[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0:len(e)])  # /!\ here you keep the duplicated point
                main.pop(0)
                continue
            # ~~> iterative process
            for ei, iline in zip(odds, range(len(odds))):
                # ~~> merging the two segments
                if b == ei[0]:
                    main[0] = e[0:len(e)]
                    main[0].extend(ei[1:])
                    odds.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    main[0] = ei[0:len(ei)]
                    main[0].extend(e[1:])
                    odds.pop(iline)
                    break
            # ~~> completed search
            if le == len(main[0]):
                polyGones.append(e[0:len(e)])
                main.pop(0)
        # ~~> removing the over-constrained elements: a point followed two
        # steps later by itself is a spike; drop the pair.
        for p in polyGones:
            if len(p) > 3:
                j = 2
                while j < len(p):
                    if p[j - 2] == p[j]:
                        p.pop(j - 2)
                        p.pop(j - 2)
                    j += 1
        return polyGones

    # Filter poly according to IPOBO on that part.
    # ~> gloseg: is the ensemble of either closed islands or
    #    open external boundary segments
    # Note: filtering now seems to mean that to have done a lot of work for nothing
    def globalSegments(self, poly):
        """Keep only the runs of global-boundary nodes (IPOBO != 0) of each
        polyline; a closed line's head and tail runs are rejoined."""
        gloseg = []
        for p in poly:
            pA = p[0]; pZ = p[len(p) - 1]; closed = False
            if pA == pZ and self.IPOBO[pA] != 0:
                closed = True
            iA = 0; iZ = 0  # iA: start of current run, iZ: its length
            ploseg = []
            for i in p:
                if self.IPOBO[i] != 0:
                    # moves the counter along for external points
                    iZ += 1
                elif iZ != 0:
                    # you have just found the end of an external segment
                    ploseg.append(p[iA:iA + iZ])
                    iA += iZ + 1
                    iZ = 0
                else:
                    iA += 1
            if iZ != 0:
                if closed and len(ploseg) > 0:
                    # Wrap-around: glue the trailing run onto the leading one
                    i = p[iA:iA + iZ]
                    i.extend(ploseg[0][1:])  # remove duplicate
                    ploseg[0] = i
                else:
                    ploseg.append(p[iA:iA + iZ])
            gloseg.extend(ploseg)
        return gloseg

    def putContent(self):
        # Write all sub-domain SELAFIN (and optionally CONLIM / i2s) files.
        # (Body continues beyond this excerpt.)
        # ~~> Extension for parallel file names
        fmtn = '00000' + str(self.NPARTS - 1)
        fmtn = fmtn[len(fmtn) - 5:]
        print '\n... 
Split the boundary connectivity' # ~~> Assemble internal and external segments polyCLOSED = dict([ (i,[]) for i in range(self.NPARTS) ]) polyFILTER = dict([ (i,[]) for i in range(self.NPARTS) ]) polyGLOSED = [] for part in range(self.NPARTS): # this could be done in parallel print ' +> Joining up boundary segments for part: ',part+1 # ~~> Joining up boundaries for sub-domains print ' ~> main internal segments' self.PINTER[part] = self.joinPairs(self.PINTER[part]) print ' ~> main external segments' polyHALO = self.joinPairs(self.PNHALO[part]) polyHALO.extend(self.PINTER[part]) polyHALO = self.joinSegments(polyHALO) print ' ~> odd segments' polyODDS = self.joinSegments(self.PNODDS[part]) print ' ~> stitching with the odd ones' polyGones = self.tetrisOddSegments(polyHALO,polyODDS) print ' ~> final closure' polyCLOSED[part] = self.joinSegments(polyGones) # ~~> Building up the entire picture polyFILTER[part] = self.globalSegments(polyCLOSED[part]) polyGLOSED.extend( polyFILTER[part] ) # ~~> Joining up boundaries for the global domain (Note: seems counter productive but is not) polyGLOSED = self.joinSegments(polyGLOSED) if self.isDOMAIN != '': print '\n... 
Printing the domain split into a series of i2s files' # ~~> Convert node numbers into x,y for part in range(self.NPARTS): print ' +> part ',part+1,' of ',self.NPARTS polyXY = [] for pg in range(len(polyCLOSED[part])): pxy = [] for pt in range(len(polyCLOSED[part][pg])): n = polyCLOSED[part][pg][pt] pxy.append([ self.slf.MESHX[n],self.slf.MESHY[n] ]) polyXY.append(pxy) # ~~> Write polygons to double check fmti = '00000' + str(part) fmti = fmti[len(fmti)-5:] fileName = path.join(path.dirname(self.slf.file['name']),self.isDOMAIN+fmtn+'-'+fmti+'.i2s') putInS(fileName,[],'i2s',polyXY) # ~~> Convert node numbers into x,y polyXY = [] for pg in range(len(polyGLOSED)): pxy = [] for pt in range(len(polyGLOSED[pg])): n = polyGLOSED[pg][pt] pxy.append([ self.slf.MESHX[n],self.slf.MESHY[n] ]) polyXY.append(pxy) # ~~> Write polygons to double check fileName = path.join(path.dirname(self.slf.file['name']),self.isDOMAIN+'.i2s') putInS(fileName,[],'i2s',polyXY) print '\n... Final check to the element partitioning' for part in range(self.NPARTS): # this could be done in parallel self.KSPLIT = self.resetPartition(part,self.PINTER[part],self.KSPLIT) if self.isDOMAIN != '': # ~~> This is optional print '\n... Printing the domain split into a SELAFIN' fileRoot,fileExts = path.splitext(self.slf.file['name']) self.slf.fole.update({ 'hook': open(fileRoot+'_PROCS'+fileExts,'wb') }) self.slf.appendHeaderSLF() self.slf.appendCoreTimeSLF(0) VARSOR = self.slf.getVALUES(0) for v in range(self.slf.NVAR): VARSOR[v] = self.NSPLIT self.slf.appendCoreVarsSLF(VARSOR) self.slf.fole['hook'].close() print '\n... Storing the global liquid boundary numbering (NUMLIQ)' # ~~> Implying NUMLIQ and the number NFRLIQ based on the joined-up lines self.clm.setNUMLIQ(polyGLOSED) print '\n... 
Split the mesh connectivity' # ~~> Preliminary set up for LIKLE, KNOLG and KEMLG by parts LIKLE = dict([ (i,[]) for i in range(self.NPARTS) ]) KELLG = dict([ (i,[]) for i in range(self.NPARTS) ]) KNOLG = dict([ (i,[]) for i in range(self.NPARTS) ]) for part in range(self.NPARTS): print ' +> re-ordering IKLE for part ',part+1 LIKLE[part],KELLG[part],KNOLG[part] = self.getIKLE(part) # ~~> CONLIM file: Preliminary set up of IFAPAR and ISEG for all parts IFAPAR = dict([ (i,{}) for i in range(self.NPARTS) ]) ISEG = {} # Organising ISEG for easier call: part 1 for part in range(self.NPARTS): for i in polyFILTER[part]: if i[0] == i[len(i)-1]: continue # /!\ you are here adding one ! if i[0] in ISEG: ISEG[i[0]].update({ part:i[1]+1 }) else: ISEG.update({ i[0]:{ part:i[1]+1 } }) if i[len(i)-1] in ISEG: ISEG[i[len(i)-1]].update({ part:-i[len(i)-2]-1 }) else: ISEG.update({ i[len(i)-1]:{ part:-i[len(i)-2]-1 } }) # Switching parts of ISEG for final call: part 2 for i in ISEG: if len(ISEG[i]) != 2: print '... You have a boundary node surounded with more than two boundary segments: ',i sys.exit(1) parts = ISEG[i].keys() ISEG[i] = { parts[0]:ISEG[i][parts[1]], parts[1]:ISEG[i][parts[0]] } # ~~> CONLIM file: Preliminary set up of NPTIR for all parts NPTIR = dict([ (i,{}) for i in range(self.NPARTS) ]) for part in range(self.NPARTS): for p in self.PINTER[part]: NPTIR[part].update( dict([ (i,[]) for i in p ]) ) parts = range(self.NPARTS) while parts != []: part = parts[0] parts.pop(0) for ip in NPTIR[part]: for ipart in parts: if ip in NPTIR[ipart]: NPTIR[part][ip].append(ipart) NPTIR[ipart][ip].append(part) print '... 
Split of the SELAFIN file' for part in range(self.NPARTS): fmti = '00000' + str(part) fmti = fmti[len(fmti)-5:] print ' +> part ',part+1,' of ',self.NPARTS self.slfn.IKLE2 = LIKLE[part] self.slfn.NELEM2 = len(LIKLE[part]) self.slfn.NPOIN2 = len(KNOLG[part]) # ~~> IPARAM has two new values: 8:NPTFR and 9:NPTIR self.slfn.IPARAM[7] = len(np.unique(np.concatenate(polyFILTER[part]))) self.slfn.IPARAM[8] = len(NPTIR[part]) # ~~> IPOBO (or IRAND) converted into KNOLG[part] self.slfn.IPOBO = KNOLG[part]+1 print ' ~> filtering the MESH' # ~~> GEO file: MESH coordinates self.slfn.MESHX = np.zeros(self.slfn.NPOIN2,dtype=np.float32) self.slfn.MESHY = np.zeros(self.slfn.NPOIN2,dtype=np.float32) self.slfn.MESHX = self.slf.MESHX[KNOLG[part]] self.slfn.MESHY = self.slf.MESHY[KNOLG[part]] # ~~> GEO file: File names fileRoot,fileExts = path.splitext(self.slf.file['name']) self.slfn.file['name'] = fileRoot+fmtn+'-'+fmti+fileExts # ~~> GEO file: Printing print ' ~> printing: ',self.slfn.file['name'] self.slfn.fole.update({ 'hook': open(self.slfn.file['name'],'wb') }) self.slfn.appendHeaderSLF() LVARSOR = np.zeros((self.slfn.NVAR,self.slfn.NPOIN2),dtype=np.float32) for t in range(len(self.slf.tags['times'])): self.slfn.appendCoreTimeSLF(t) VARSOR = self.slf.getVALUES(t) for v in range(self.slfn.NVAR): LVARSOR[v] = VARSOR[v][KNOLG[part]] self.slfn.appendCoreVarsSLF(LVARSOR) self.slfn.fole['hook'].close() if not self.isCONLIM: return print '\n... 
Connect elements across internal boundaries (IFAPAR)' for part in range(self.NPARTS): print ' +> part ',part+1,' of ',self.NPARTS # ~~> CONLIM file: Preliminary set up of PEHALO elements accross internal boundaries PEHALO = {}; SEHALO = {} # Step 1: find out about the primary elements and loop through IKLE self.NSPLIT *= 0 MASKER = NPTIR[part].keys() self.NSPLIT[MASKER] += 1 print ' ~> Assembling primary elements with other side' # Sub Step 1: Assembling all edges from the other sides maxbar = 0; ibar = 0 for ip in range(self.NPARTS): maxbar += len(LIKLE[ip]) pbar = ProgressBar(maxval=maxbar).start() for otherpart in range(self.NPARTS): if otherpart == part: continue # all parts are still positive at this stage for k in range(len(LIKLE[otherpart])): ibar += 1 e = self.slf.IKLE[KELLG[otherpart][k]] if np.count_nonzero( self.NSPLIT[e] ) < 2: continue for p1,p2 in zip([1,2,0],[0,1,2]): # reverse order because looking from the other side if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0: if not (e[p1],e[p2]) in PEHALO: PEHALO.update({ (e[p1],e[p2]):[0,[]] }) PEHALO[(e[p1],e[p2])][1].append(k) PEHALO[(e[p1],e[p2])][1].append(otherpart) pbar.update(ibar) # Sub Step 2: Assembling all edges from the primary side (there are three times more of them) for k in range(len(LIKLE[part])): ibar += 1 j = KELLG[part][k] e = self.slf.IKLE[j] if np.count_nonzero( self.NSPLIT[e] ) < 2: continue for p1,p2,p3 in zip([0,1,2],[1,2,0],[2,0,1]): if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0: if (e[p1],e[p2]) in PEHALO: # the good side opposes the dark side PEHALO[(e[p1],e[p2])][0] = k if self.NSPLIT[e[p3]] == 0: self.NSPLIT[e[p3]] = -1 if self.NSPLIT[e[p3]] == -1: if not (e[p1],e[p3]) in SEHALO: SEHALO.update({ (e[p1],e[p3]):[] }) SEHALO[(e[p1],e[p3])].append(k) if not (e[p2],e[p3]) in SEHALO: SEHALO.update({ (e[p2],e[p3]):[] }) SEHALO[(e[p2],e[p3])].append(k) else: # self.NSPLIT[e[p3]] must be 2 ! 
if not (e[p3],e[p1]) in SEHALO: SEHALO.update({ (e[p3],e[p1]):[] }) if k not in SEHALO[(e[p3],e[p1])]: SEHALO[(e[p3],e[p1])].append(k) if not (e[p2],e[p3]) in SEHALO: SEHALO.update({ (e[p2],e[p3]):[] }) if k not in SEHALO[(e[p2],e[p3])]: SEHALO[(e[p2],e[p3])].append(k) if self.KSPLIT[j] >= 0: self.KSPLIT[j] = -(self.KSPLIT[j]+1) # /!\ This is very dangerous but necessary pbar.update(ibar) pbar.finish() # Sub Step 3: Final clean up of the other side ? no need but check later for (ei)[0] == 0 # Step 2: find out about the secondary elements on IKLE ( local LIKLE ? ) print ' ~> Assembling secondary elements of that side' pbar = ProgressBar(maxval=len(LIKLE[part])).start() for k in range(len(LIKLE[part])): j = KELLG[part][k] e = self.slf.IKLE[j] if self.KSPLIT[j] != part: continue if np.count_nonzero( self.NSPLIT[e] ) < 2: continue for i in [0,1,2]: ii = (i+1)%3 if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # correct orientation if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[ii],e[i]) in SEHALO: SEHALO[(e[ii],e[i])].append(k) # opposite orientation ii = (i+2)%3 if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # correct orientation if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # opposite orientation if self.KSPLIT[j] < 0: self.KSPLIT[j] = -self.KSPLIT[j] - 1 # /!\ back to a safe place pbar.update(k) pbar.finish() # Step 3: finally cross reference information between SEHALO and PEHALO print ' ~> Combining sides surrounding the halo-elements' for ie in PEHALO: if PEHALO[ie][0] == 0: continue k = PEHALO[ie][0] # element number in its local part numbering if not k in IFAPAR[part]: IFAPAR[part].update({ k:[-2,-1,-2,-1,-2,-1] }) j = KELLG[part][k] e = self.slf.IKLE[j] for p1,p2 in zip([0,1,2],[1,2,0]): if (e[p1],e[p2]) in SEHALO: if len(SEHALO[(e[p1],e[p2])]) > 1: if 
SEHALO[(e[p1],e[p2])][0] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p1],e[p2])][1] if SEHALO[(e[p1],e[p2])][1] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p1],e[p2])][0] IFAPAR[part][k][1+2*p1] = part if (e[p2],e[p1]) in SEHALO: if len(SEHALO[(e[p2],e[p1])]) > 1: if SEHALO[(e[p2],e[p1])][0] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p2],e[p1])][1] if SEHALO[(e[p2],e[p1])][1] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p2],e[p1])][0] IFAPAR[part][k][1+2*p1] = part if ie == (e[p1],e[p2]): IFAPAR[part][k][2*p1] = PEHALO[ie][1][0] IFAPAR[part][k][1+2*p1] = PEHALO[ie][1][1] # ~~> CONLIM file: Write to file ... pfuuuuuh ... this is it ! print '\n... Split of the CONLIM files' for part in range(self.NPARTS): fmti = '00000' + str(part) fmti = fmti[len(fmti)-5:] print ' +> part: ',part+1,' of ',self.NPARTS # ~~> CONLIM file: Set the filter INDEX = np.zeros_like(self.clm.INDEX,dtype=np.int) for contour in polyFILTER[part]: # ~~> Closed contour: no need to change ISEG if contour[0] == contour[len(contour)-1]: for c in contour[1:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c]+1 # ~~> Open contour: need to change ISEG with neighbours else: for c in contour[0:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c]+1 iA = self.clm.KFRGL[contour[0]] self.clm.POR['is'][iA] = ISEG[contour[0]][part] self.clm.POR['xs'][iA] = self.slf.MESHX[abs(ISEG[contour[0]][part])-1] # /!\ MESHX start at 0 self.clm.POR['ys'][iA] = self.slf.MESHY[abs(ISEG[contour[0]][part])-1] # /!\ MESHY start at 0 iA = self.clm.KFRGL[contour[len(contour)-1]] self.clm.POR['is'][iA] = ISEG[contour[len(contour)-1]][part] self.clm.POR['xs'][iA] = self.slf.MESHX[abs(ISEG[contour[len(contour)-1]][part])-1] self.clm.POR['ys'][iA] = self.slf.MESHY[abs(ISEG[contour[len(contour)-1]][part])-1] self.clm.INDEX = INDEX # ~~> CONLIM file: Set the NPTIR and CUTs self.clm.NPTIR = NPTIR[part] # ~~> CONLIM file: Set the IFAPAR self.clm.IFAPAR = IFAPAR[part] # ~~> CONLIM file fileRoot,fileExts = path.splitext(self.clm.fileName) print ' ~> printing: 
',fileRoot+fmtn+'-'+fmti+fileExts self.clm.putContent(fileRoot+fmtn+'-'+fmti+fileExts) return
class Dumper2D(Caster): def __init__(self, caster, dump): Caster.__init__(self, { 'object': caster.object, 'obdata': caster.obdata }) self.obtype = dump['saveas'] # the type of file, 'slf' most probably self.oudata = None # the loaded SELAFIN object itself, most probably #self.obdump = dumpSELAFIN() def add(self, typl, what): Caster.add(self, typl, what) # ~~> output from for 2D file if self.obtype == 'slf': #self.obdump.add(self.object[what['file']]) cast = self.get(typl, what) support = cast.support values = cast.values if len(support) != 3: print '... not enough information to save as 2d variable' sys.exit(1) obj = self.object[what['file']] # ~~ SELAFIN header ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if not self.oudata: self.oudata = SELAFIN('') # create the out header self.oudata.TITLE = '' # TODO: pass it on from what and deco self.oudata.NBV1 = 0 self.oudata.VARNAMES = [] self.oudata.VARUNITS = [] self.oudata.IPARAM = obj.IPARAM self.oudata.IPARAM[6] = 1 # 3D being forced to 2D self.oudata.NDP2 = len(support[2][0]) if np.all([obj.IKLE2, support[2]]): self.oudata.IKLE2 = support[3] self.oudata.IPOB2 = np.zeros(len(supoort[0]), dtype=np.int) self.oudata.MESHX = support[0] self.oudata.MESHY = support[1] else: self.oudata.IKLE2 = obj.IKLE2 self.oudata.IPOB2 = obj.IPOB2 # IPOBO missing from support self.oudata.MESHX = obj.MESHX self.oudata.MESHY = obj.MESHY self.oudata.NELEM2 = len(self.oudata.IKLE2) self.oudata.NPOIN2 = len(self.oudata.MESHX) self.oudata.NELEM3 = self.oudata.NELEM2 self.oudata.NPOIN3 = self.oudata.NPOIN2 self.oudata.NDP3 = self.oudata.NDP2 self.oudata.NPLAN = 1 vars, vtypes = whatVarsSLF(what['vars'], obj.VARNAMES) self.oudata.NBV1 = self.oudata.NBV1 + len(vars[0]) self.oudata.NBV2 = 0 self.oudata.NVAR = self.oudata.NBV1 + self.oudata.NBV2 self.oudata.CLDNAMES = [] self.oudata.CLDUNITS = [] self.oudata.VARINDEX = range(self.oudata.NVAR) for ivar, ival in zip(vars[0], range(len(vars[0]))): self.oudata.VARNAMES.append(obj.VARNAMES[ivar]) 
self.oudata.VARUNITS.append(obj.VARUNITS[ivar]) self.obdata.update({obj.VARNAMES[ivar]: [values[ival]]}) if max(self.oudata.IPARAM[9], obj.IPARAM[9]) > 0: if self.oudata.DATETIME != obj.DATETIME: self.oudata.IPARAM[9] = 0 if self.oudata.NELEM2 != obj.NELEM2 or self.oudata.NPOIN2 != obj.NPOIN2: print '... mismatch between the 2D sizes of layers of a same save2d object ' sys.exit(1) self.oudata.IKLE3 = self.oudata.IKLE2 self.oudata.IPOB3 = self.oudata.IPOB2 # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1) def save(self, fileName): # gather common information for the final header if self.obtype == 'slf': self.oudata.fole = {} self.oudata.fole.update({'name': fileName}) self.oudata.fole.update( {'endian': ">"}) # "<" means little-endian, ">" means big-endian self.oudata.fole.update({'float': ('f', 4)}) #'f' size 4, 'd' = size 8 self.oudata.fole.update({'hook': open(fileName, 'wb')}) self.oudata.appendHeaderSLF() self.oudata.appendCoreTimeSLF(0.0) # TODO: recover track of time for ivar in self.oudata.VARNAMES: self.oudata.appendCoreVarsSLF(self.obdata[ivar]) self.oudata.fole['hook'].close() # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1)
from samplers.meshes import crossMesh,sliceMesh
from parsers.parserStrings import parseArrayPoint
################################################################################
#####            MAIN PROGRAM                                         ##########
################################################################################
if __name__ == "__main__":
    ############################################################################
    #####            Importing data                                       ######
    ############################################################################
    # Cross section from Selafin file
    # NOTE(review): SELAFIN and subsetVariablesSLF come from imports outside
    # this view — confirm against the file header.
    slf = SELAFIN("sis_sandpit.slf")
    slf.setKDTree()
    slf.setMPLTri()
    # variable and polyline end points of the requested cross-section
    variable = 'bottom:line'
    coordinates = '(50.0;0.5)(130.0;0.5)'
    timef = [20]
    vars = subsetVariablesSLF(variable,slf.VARNAMES)
    # xyo: (x,y) tuples of the section points; zpo: requested plane numbers
    xyo = []; zpo = []
    for xyi,zpi in parseArrayPoint(coordinates,slf.NPLAN):
        # a tuple is already an (x,y) pair; an int is a node number to look up
        if type(xyi) == type(()): xyo.append(xyi)
        else: xyo.append( (slf.MESHX[xyi],slf.MESHY[xyi]) )
        for p in zpi:
            if p not in zpo: zpo.append(p)
    # slice the mesh along the polyline (uses the KD-tree built above)
    xys,support2d = sliceMesh(xyo,slf.IKLE2,slf.MESHX,slf.MESHY,slf.tree)
def draw(self,type,what,fig):
    """Draw one SELAFIN-based figure layer described by `what` onto the
    current matplotlib target.

    `what['vars']` is a ';'-separated list of 'variable:how' items where
    `how` selects mesh / wire / map / label / arrow / angle rendering.
    NOTE(review): `plt` and `deco` are module-level names from outside this
    view; `type` shadows the builtin but is part of the public signature.
    """
    # ~~> Load data
    slf = SELAFIN(what['file'])
    # /!\ WACLEO: Temporary fix because TOMAWAC's IOs names are not yet standard TELEMAC
    if 'WACLEO' in type.upper() or \
        'SELAFIN' in type.upper():
        # ~~> Extract data
        elements = None; edges = []; edgexy = []
        # ~~> Deco: default region of interest is the mesh bounding box
        xmin = np.min(slf.MESHX); xmax = np.max(slf.MESHX)
        ymin = np.min(slf.MESHY); ymax = np.max(slf.MESHY)
        if what.has_key('roi'):
            if what['roi'] != []:
                xmin = min(what['roi'][0][0],what['roi'][1][0])
                xmax = max(what['roi'][0][0],what['roi'][1][0])
                ymin = min(what['roi'][0][1],what['roi'][1][1])
                ymax = max(what['roi'][0][1],what['roi'][1][1])
                deco['roi'] = [[xmin,ymin],[xmax,ymax]]
        for var in what["vars"].split(';'):
            v,t = var.split(':')
            if "mesh" in t:
                # ~~> Extract mesh connectivity (lazily, first use only)
                if elements == None:
                    elements = np.dstack((slf.MESHX[slf.IKLE],slf.MESHY[slf.IKLE]))
                # ~~> Draw (works with triangles and quads)
                drawMesh2DElements(plt,elements,deco)
            elif "wire" in t:
                # ~~> Extract unique edges and outline /!\ assumes all same clowise orientation
                if edges == []:
                    for e in slf.IKLE:
                        for n in range(slf.NDP):
                            if (e[n],e[(n+1)%slf.NDP]) not in edges:
                                edges.append((e[(n+1)%slf.NDP],e[n]))
                    # ~~> Assemble wires
                    for e in edges:
                        edgexy.append(( (slf.MESHX[e[0]],slf.MESHY[e[0]]) , (slf.MESHX[e[1]],slf.MESHY[e[1]]) ))
                # ~~> Draw (works with triangles and quads)
                drawMeshLines(plt,edgexy,deco)
            else:
                # ~~> Extract variable data for one time frame
                VARSORS = []
                frame = 0
                if what.has_key('time'):
                    frame = int(what['time'][0])
                    # negative frames count back from the last record
                    if frame < 0: frame = max( 0, len(slf.tags['cores']) + frame )
                # seek to the frame and read matching variables straight from
                # the big-endian binary file (4-byte Fortran record markers)
                slf.file.seek(slf.tags['cores'][frame])
                slf.file.read(4+4+4)
                for ivar in range(slf.NVAR):
                    slf.file.read(4)
                    if v.upper() in slf.VARNAMES[ivar].strip():
                        VARSORS.append(np.asarray(unpack('>'+str(slf.NPOIN3)+'f',slf.file.read(4*slf.NPOIN3))))
                    else:
                        slf.file.read(4*slf.NPOIN3)
                    slf.file.read(4)
                # ~~> Multi-variables calculations
                MESHX = np.array(slf.MESHX); MESHY = np.array(slf.MESHY)
                if len(VARSORS) > 1:
                    if "arrow" in t or "angle" in t:
                        # vector plot: optionally resample onto a regular grid
                        if what['extract'] != []:
                            dx = (xmax-xmin)/what['extract'][0][0]
                            dy = (ymax-ymin)/what['extract'][0][1]
                            grid = np.meshgrid(np.arange(xmin, xmax+dx, dx),np.arange(ymin, ymax+dy, dy))
                            MESHX = np.concatenate(grid[0]); MESHY = np.concatenate(grid[1])
                            le,ln,bn = xyLocateMeshSLF(np.dstack((MESHX,MESHY))[0],slf.NELEM3,slf.IKLE,slf.MESHX,slf.MESHY)
                            VARSOR = [np.zeros(len(le),np.float32),np.zeros(len(le),np.float32)]
                            # barycentric interpolation of (u,v) at grid points inside the mesh
                            for xy in range(len(bn)):
                                if le[xy] >= 0:
                                    VARSOR[0][xy] = bn[xy][0]*VARSORS[0][ln[xy][0]] + bn[xy][1]*VARSORS[0][ln[xy][1]] + bn[xy][2]*VARSORS[0][ln[xy][2]]
                                    VARSOR[1][xy] = bn[xy][0]*VARSORS[1][ln[xy][0]] + bn[xy][1]*VARSORS[1][ln[xy][1]] + bn[xy][2]*VARSORS[1][ln[xy][2]]
                    else:
                        # several scalars but no vector request: plot the magnitude
                        t = "map"
                        VARSOR = np.sqrt(np.sum(np.power(np.dstack(VARSORS[0:2])[0],2),axis=1))
                else: VARSOR = VARSORS[0]
                # ~~> Element types
                if slf.NDP == 3: IKLE = np.array(slf.IKLE)
                elif slf.NDP == 4:
                    # ~~> split each quad into triangles
                    IKLE = np.delete(np.concatenate((slf.IKLE,np.roll(slf.IKLE,2,axis=1))),np.s_[3::],axis=1)
                # ~~> Draw (multiple options possible)
                if "map" in t: drawColouredTriMaps(plt,(slf.MESHX,slf.MESHY,IKLE,VARSOR),deco)
                if "label" in t: drawLabeledTriContours(plt,(slf.MESHX,slf.MESHY,slf.IKLE,VARSOR),deco)
                if "arrow" in t: drawColouredTriVects(plt,(MESHX,MESHY,VARSOR,False),deco)
                if "angle" in t: drawColouredTriVects(plt,(MESHX,MESHY,VARSOR,True),deco)
    else:
        print '... do not know how to draw this format: ' + type
    slf.file.close()
    return
def main1(self):
    """Load a SELAFIN result file, locate the requested time step and
    velocity variables, then hand the data to a QGIS worker (threaded or
    inline) that writes the output shapefile.

    NOTE(review): `progress`, `donnees_d_entree`, `Worker`, `workerFinished`
    come from enclosing scope; the mix of `donnees_d_entree` and
    `self.donnees_d_entree` below looks inconsistent — confirm which one the
    worker actually reads.
    """
    progress.setPercentage(0)
    progress.setText(str(ctime()) + " - Initialisation - Debut du script")
    # Load the .res (SELAFIN) result file
    slf = SELAFIN(self.donnees_d_entree['pathselafin'])
    # Find the time step to process
    test = False
    for i, time in enumerate(slf.tags["times"]):
        progress.setText(
            str(ctime()) +
            " - Initialisation - Temps present dans le fichier : " +
            str(np.float64(time)))
        #print str(i) +" "+ str(time) + str(type(time))
        if float(time) == float(self.donnees_d_entree['temps']):
            test = True
            values = slf.getVALUES(i)
    if test:
        progress.setText(
            str(ctime()) + " - Initialisation - Temps traite : " +
            str(np.float64(self.donnees_d_entree['temps'])))
    else:
        raise GeoAlgorithmExecutionException(
            str(ctime()) + " - Initialisation - Erreur : \
Temps non trouve")
    # Find the variables to process (and register them as QGIS fields)
    test = [False, False]
    tabparam = []
    donnees_d_entree['champs'] = QgsFields()
    for i, name in enumerate(slf.VARNAMES):
        progress.setText(
            str(ctime()) +
            " - Initialisation - Variable dans le fichier res : " +
            name.strip())
        tabparam.append([i, name.strip()])
        donnees_d_entree['champs'].append(
            QgsField(
                str(name.strip()).translate(None, "?,!.;"),
                QVariant.Double))
        # remember the indexes of the X and Y velocity components, if requested
        if self.donnees_d_entree['Parametre_vitesse_X'] != None:
            if str(name).strip(
            ) == self.donnees_d_entree['Parametre_vitesse_X'].strip():
                test[0] = True
                self.donnees_d_entree['paramvalueX'] = i
            if str(name).strip(
            ) == self.donnees_d_entree['Parametre_vitesse_Y'].strip():
                test[1] = True
                self.donnees_d_entree['paramvalueY'] = i
        else:
            self.donnees_d_entree['paramvalueX'] = None
            self.donnees_d_entree['paramvalueY'] = None
    if self.donnees_d_entree['Parametre_vitesse_X'] != None:
        if test == [True, True]:
            progress.setText(
                str(ctime()) + " - Initialisation - Parametre trouvee : " +
                str(tabparam[self.donnees_d_entree['paramvalueX']]
                    [1]).strip() + " " + str(tabparam[
                        self.donnees_d_entree['paramvalueY']][1]).strip())
        else:
            raise GeoAlgorithmExecutionException(
                str(ctime()) + " - Initialisation - Erreur : \
Parametre vitesse non trouve")
    # Load the mesh topology from the .res file
    self.donnees_d_entree['mesh'] = np.array(slf.IKLE3)
    self.donnees_d_entree['x'] = slf.MESHX
    self.donnees_d_entree['y'] = slf.MESHY
    # Check that the shapefile is not already loaded/locked
    if isFileLocked(self.donnees_d_entree['pathshp'], True):
        raise GeoAlgorithmExecutionException(
            str(ctime()) + " - Initialisation - Erreur :\
Fichier shape deja charge !!")
    # Load the values of every variable at the selected time step
    self.donnees_d_entree['ztri'] = []
    for i in range(len(tabparam)):
        self.donnees_d_entree['ztri'].append(values[i])
    # Start the worker, either in a background thread or inline
    self.worker = Worker(donnees_d_entree)
    if donnees_d_entree['traitementarriereplan'] == 0:
        self.worker.moveToThread(self.thread)
        self.thread.started.connect(self.worker.run)
        self.worker.progress.connect(progress.setPercentage)
        self.worker.status.connect(progress.setText)
        self.worker.finished.connect(workerFinished)
        self.worker.finished.connect(self.worker.deleteLater)
        self.thread.finished.connect(self.thread.deleteLater)
        self.worker.finished.connect(self.thread.quit)
        champ = QgsFields()
        writercontour = VectorWriter(
            self.donnees_d_entree['fichierdesortie_point'], None, champ,
            QGis.WKBMultiPoint,
            QgsCoordinateReferenceSystem(str(
                self.donnees_d_entree['crs'])))
        self.thread.start()
    else:
        self.worker.run()
            # NOTE(review): tail of a truncated CSV-writing function whose
            # def/loops are outside this view — kept verbatim.
            else: csvF.write(str(columns[i][j]) + ',')
    csvF.close()
    return

if __name__ == "__main__":
    ############################################################################
    #####            Importing data                                       ######
    ############################################################################
    # Times Series from Selafin file
    # 5113 is the node number
    # 0,1,2,8 are the variable indexes
    slf = SELAFIN("sis_foulness.slf")
    series = slf.getSERIES([5113], [0, 1, 2, 8])
    # one time series per requested variable, at the single requested node
    u = series[0][0]
    v = series[1][0]
    h = series[2][0]
    QSsuspension = series[3][0]
    # Experiment data from CSV file
    # always write the variable name in lower case
    csv = CSV()
    csv.getFileContent('fielddata.csv')
    t, QSexp = csv.getColumns('qs')
class JCOPE2():
    """Download JCOPE2 ocean-model fields over OPeNDAP and convert them to
    2D/3D SELAFIN files (header construction shown here; the record-writing
    putContent continues past this view).
    """

    def __init__(self, dates):
        # ~~~~ Initialisation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # model period requested by the caller, as [start, end] datetimes
        self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
        jcope2vars = ['el', 't', 's', 'u', 'v']
        jcope2date = [1993, 1, 1]
        # /!\ unknown convertion of time records into dates
        jcope2root = 'http://apdrc.soest.hawaii.edu/dods/public_data/FRA-JCOPE2'
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract JCOPE2 time records\n'
        self.experiments = []
        experiment = {}
        # /!\ only one time period covered at this stage
        for jvar in jcope2vars:
            jcope2url = jcope2root + '/' + jvar
            jcope2data = open_url(jcope2url)
            NIT = jcope2data['time'].shape[0]
            print ' x ' + str(NIT) + ' records from ' + jcope2url,
            ITs = []
            ATs = []
            # keep the record indices (ITs) and dates (ATs) inside the period;
            # record i is taken to be jcope2date + i days
            for itime in range(NIT):
                d = datetime(jcope2date[0], jcope2date[1], jcope2date[2]) + timedelta(itime)
                if itime == 0: print ' from: ', str(d),
                if itime == NIT - 1: print ' to: ', str(d)
                if self.moddates[0] <= d and d <= self.moddates[1]:
                    ITs.append(itime)
                    ATs.append(d)
            if ITs != []:
                # keep a handle on every data variable of this dataset
                for ivar in jcope2data.keys():
                    if ivar not in ['time', 'lev', 'lat', 'lon']:
                        experiment.update({ivar: jcope2data[ivar]})
            else:
                print '... I could not find the time to do your work'
                print ' ~> you may need to select a different time period'
                sys.exit(1)
        # NOTE(review): append placed after the loop (one combined experiment);
        # original indentation was lost in transit — confirm it was not
        # appended once per variable.
        self.experiments.append((experiment, NIT, ITs, ATs))
        print '\n'

    def getHeaderJCOPE2(self, bounds):
        """Build the 2D and 3D SELAFIN headers (variables, sizes, IKLE,
        IPOBO, mesh) for the lat/lon box `bounds` = [[latmin, lonmin],
        [latmax, lonmax]], masking out dry (-99) points.
        """
        # ~~> inheritence
        self.slf3d = SELAFIN('')  # slf3d
        self.slf2d = SELAFIN('')  # slf2d surface
        print ' +> Set SELAFIN Variables'
        self.slf3d.TITLE = ''
        self.slf3d.NBV1 = 6
        self.slf3d.NVAR = 6
        self.slf3d.VARINDEX = range(self.slf3d.NVAR)
        self.slf3d.VARNAMES = ['ELEVATION Z ', \
            'SALINITY ','TEMPERATURE ', \
            'VELOCITY U ','VELOCITY V ','VELOCITY W ']
        self.slf3d.VARUNITS = ['M ', \
            ' ',' ', \
            'M/S ','M/S ','M/S ']
        # the 2D (surface) file carries everything but VELOCITY W
        self.slf2d.TITLE = self.slf3d.TITLE
        self.slf2d.NBV1 = self.slf3d.NBV1 - 1
        self.slf2d.NVAR = self.slf2d.NBV1
        self.slf2d.VARINDEX = range(self.slf2d.NVAR)
        self.slf2d.VARNAMES = self.slf3d.VARNAMES[0:-1]
        self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1]
        # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # ~~> the whole of the 2D grid sizes
        print ' +> Extract JCOPE2 sizes'
        # /!\ 't' gives me access to NPLAN in 3D
        jcope2data = self.experiments[0][0]['temp']
        NX1D = jcope2data['lon'].shape[0]
        NY1D = jcope2data['lat'].shape[0]
        print ' +> Extract JCOPE2 mesh'
        lonX1D = jcope2data['lon'].data[0:NX1D]
        latY1D = jcope2data['lat'].data[0:NY1D]
        # ~~> no correction for lat,lon
        # ~~> subset for the SELAFIN: indices of grid lines inside the box
        print ' +> Set SELAFIN mesh'
        self.jcope2ilon = np.where(
            (lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0]
        self.jcope2ilat = np.where(
            (latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0]
        x = lonX1D[self.jcope2ilon]
        y = latY1D[self.jcope2ilat]
        NX1D = len(x)
        NY1D = len(y)
        # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Set SELAFIN sizes'
        # ~~> 3D: prisms (NDP3=6) stacked over a triangulated grid (NDP2=3)
        self.slf3d.NPLAN = jcope2data['lev'].shape[0]
        self.ZPLAN = jcope2data['lev'][
            0:self.slf3d.NPLAN][::-1]  # I do not know any other way
        self.slf3d.NDP2 = 3
        self.slf3d.NDP3 = 6
        self.slf3d.NPOIN2 = NX1D * NY1D
        self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
        self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
        self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
        self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0]
        # ~~> 2D
        self.slf2d.NPLAN = 1
        self.slf2d.NDP2 = self.slf3d.NDP2
        self.slf2d.NDP3 = self.slf2d.NDP2
        self.slf2d.NPOIN2 = self.slf3d.NPOIN2
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        self.slf2d.NELEM2 = self.slf3d.NELEM2
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # each grid cell of each layer becomes two prisms
        print ' +> Set the default SELAFIN IKLE 3D'
        ielem = 0
        pbar = ProgressBar(maxval=self.slf3d.NELEM3).start()
        self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3), dtype=np.int)
        for k in range(1, self.slf3d.NPLAN):
            for i in range(1, NX1D):
                for j in range(1, NY1D):
                    ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2
                    # ~~> first prism
                    self.slf3d.IKLE3[ielem][0] = ipoin
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
                    # ~~> second prism
                    self.slf3d.IKLE3[ielem][0] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][
                        3] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
        pbar.finish()
        # the 2D triangulation is the bottom face of the first prism layer
        self.slf2d.IKLE3 = np.compress(
            [True, True, True, False, False, False],
            self.slf3d.IKLE3[0:self.slf3d.NELEM2],
            axis=1)  #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2))
        # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # number the boundary nodes of the rectangular grid, counter-clockwise
        print ' +> Set SELAFIN IPOBO'
        pbar = ProgressBar(maxval=NX1D + NY1D).start()
        IPOB2 = np.zeros(self.slf3d.NPOIN2, dtype=np.int)
        # ~~> along the x-axis (lon)
        for i in range(NX1D):
            ipoin = i * NY1D
            IPOB2[ipoin] = i + 1
            ipoin = i * NY1D - 1
            IPOB2[ipoin] = 2 * NX1D + (NY1D - 2) - i
            pbar.update(i)
        # ~~> along the y-axis (alt)
        for i in range(1, NY1D):
            ipoin = i
            IPOB2[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1
            ipoin = NY1D * (NX1D - 1) + i
            IPOB2[ipoin] = NX1D + i
            pbar.update(i + NX1D)
        pbar.finish()
        # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # /!\ 'el' gives me access to the real mesh removing elements with -99 values
        print ' +> Mask the non-values from the SELAFIN IKLE'
        jcope2data = self.experiments[0][0]['el']
        var = np.swapaxes(
            jcope2data['el'].data[0, 0,
                                  self.jcope2ilat[0]:self.jcope2ilat[-1] + 1,
                                  self.jcope2ilon[0]:self.jcope2ilon[-1] + 1][0],
            1, 2).ravel()
        # ~> the elements you wish to keep: all three nodes must be wet (> -99)
        MASK2 = self.slf2d.IKLE3[np.where(
            np.sum(np.in1d(
                self.slf2d.IKLE3,
                np.compress(var > -99, np.arange(len(
                    var)))).reshape(self.slf2d.NELEM2, self.slf2d.NDP2),
                   axis=1) == 3)]
        self.slf2d.NELEM2 = len(MASK2)
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf3d.NELEM2 = self.slf2d.NELEM2
        self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
        # ~~> re-numbering IKLE2 as a local connectivity matrix
        # KNOLG: kept global node numbers; KNOGL: global -> local lookup
        KNOLG, indices = np.unique(np.ravel(MASK2), return_index=True)
        KNOGL = dict(zip(KNOLG, range(len(KNOLG))))
        self.MASK2 = np.in1d(np.arange(len(var)), KNOLG)
        self.MASK3 = np.tile(self.MASK2, self.slf3d.NPLAN)
        self.slf2d.IKLE2 = -np.ones_like(MASK2, dtype=np.int)
        for k in range(len(MASK2)):
            self.slf2d.IKLE2[k] = [
                KNOGL[MASK2[k][0]], KNOGL[MASK2[k][1]], KNOGL[MASK2[k][2]]
            ]
        self.slf3d.NPOIN2 = len(KNOLG)
        self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
        self.slf2d.NPOIN2 = self.slf3d.NPOIN2
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        # ~~> re-connecting the upper floors: shift the 2D triangulation up by
        #     NPOIN2 per plane to rebuild the prism connectivity
        self.slf2d.IKLE3 = self.slf2d.IKLE2
        self.slf3d.IKLE2 = self.slf2d.IKLE2
        self.slf3d.IKLE3 = \
            np.repeat(self.slf2d.NPOIN2*np.arange(self.slf3d.NPLAN-1),self.slf2d.NELEM2*self.slf3d.NDP3).reshape((self.slf2d.NELEM2*(self.slf3d.NPLAN-1),self.slf3d.NDP3)) + \
            np.tile(np.add(np.tile(self.slf2d.IKLE2,2),np.repeat(self.slf2d.NPOIN2*np.arange(2),self.slf3d.NDP2)),(self.slf3d.NPLAN-1,1))
        # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.slf2d.IPOB2 = IPOB2[self.MASK2]
        self.slf2d.IPOB3 = self.slf2d.IPOB2
        self.slf3d.IPOB2 = self.slf2d.IPOB2
        self.slf3d.IPOB3 = np.ravel(
            np.add(
                np.repeat(self.slf2d.IPOB2, self.slf3d.NPLAN).reshape(
                    (self.slf2d.NPOIN2, self.slf3d.NPLAN)),
                self.slf2d.NPOIN2 * np.arange(self.slf3d.NPLAN)).T)
        # ~~~~ Mesh ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # NOTE(review): the +0.042 offset is unexplained here — presumably a
        # half-cell registration shift; confirm against the JCOPE2 grid spec.
        print ' +> Set SELAFIN mesh'
        self.slf3d.MESHX = np.tile(x, NY1D).reshape(
            NY1D, NX1D).T.ravel()[self.MASK2] + 0.042
        self.slf3d.MESHY = np.tile(y, NX1D)[self.MASK2] + 0.042
        self.slf2d.MESHX = self.slf3d.MESHX
        self.slf2d.MESHY = self.slf3d.MESHY

    def putContent(self, rootName, only2D):
        """Write the time records into t2d_/t3d_ SELAFIN files.
        (Definition continues beyond this view.)
        """
        nbar = 0
        for e in self.experiments:
            nbar += len(e[2])
        ilat = [self.jcope2ilat[0], self.jcope2ilat[-1] + 1]
        ilon = [self.jcope2ilon[0], self.jcope2ilon[-1] + 1]
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract JCOPE2 time records'
        self.slf3d.tags = {'times': []}
        # ~~~~ Start Date and Time ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # one record per day, in seconds
        self.slf3d.tags['times'] = 86400.0 * np.arange(nbar)
        self.slf2d.tags = {'times': self.slf3d.tags['times']}
        self.slf3d.DATETIME = self.experiments[-1][3][0].timetuple()[0:6]
        self.slf2d.DATETIME = self.slf3d.DATETIME
        self.slf3d.IPARAM[9] = 1
        self.slf2d.IPARAM[9] = 1
        #a = np.arange(40).reshape(5,8)[::-1].ravel() # 5 plans, 8 points
        print ' +> Write SELAFIN headers'
        if not only2D:
            self.slf3d.fole = {}
            self.slf3d.fole.update({'hook': open('t3d_' + rootName, 'wb')})
            self.slf3d.fole.update({'name': 't3d_' + rootName})
            self.slf3d.fole.update({'endian': ">"})  # big endian
            self.slf3d.fole.update({'float': ('f', 4)})  # single precision
            self.slf3d.appendHeaderSLF()
        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open('t2d_' + rootName, 'wb')})
        self.slf2d.fole.update({'name': 't2d_' + rootName})
self.slf2d.fole.update({'endian': ">"}) # big endian self.slf2d.fole.update({'float': ('f', 4)}) # single precision self.slf2d.appendHeaderSLF() # ~~~~ Time loop(s) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #var3d = np.zeros(self.slf3d.NPOIN3,dtype=np.float) #var2d = np.zeros(self.slf2d.NPOIN3,dtype=np.float) print ' +> Write SELAFIN cores' ibar = 0 pbar = ProgressBar(maxval=6 * nbar).start() for e in self.experiments: jcope2data = e[0] i1 = min(e[2]) i2 = max(e[2]) + 1 for t in range(i1, i2): # ~~> time stamp pbar.write(' x ' + str(e[3][t - i1]), 6 * ibar + 0) pbar.update(6 * ibar + 0) if not only2D: self.slf3d.appendCoreTimeSLF(ibar) self.slf2d.appendCoreTimeSLF(ibar) # ~~> ELEVATION var2d = np.swapaxes( jcope2data['el']['el'].data[t, 0, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2).ravel()[self.MASK2] self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = -np.tile(self.ZPLAN, self.slf3d.NPOIN2).reshape( self.slf3d.NPOIN2, self.slf3d.NPLAN).T.ravel() var3d[self.slf3d.NPOIN3 - self.slf3d.NPOIN2:] = var2d self.slf3d.appendCoreVarsSLF([var3d]) pbar.write(' - elevation', 6 * ibar + 1) pbar.update(6 * ibar + 1) # ~~> SALINITY if only2D: var = np.swapaxes( jcope2data['salt']['salt'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = np.swapaxes( jcope2data['salt']['salt'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) var2d = var[0].ravel()[self.MASK2] self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = var[::-1].ravel()[self.MASK3] for ipoin in range(self.slf3d.NPOIN2): for iplan in range(self.slf3d.NPLAN - 1, 0, -1): if var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] < -99.0: var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] = var3d[ ipoin + iplan * self.slf3d.NPOIN2] self.slf3d.appendCoreVarsSLF([var3d]) pbar.write(' - salinity', 6 * ibar + 2) pbar.update(6 * ibar + 2) # ~~> TEMPERATURE if only2D: var = np.swapaxes( jcope2data['temp']['temp'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = 
np.swapaxes( jcope2data['temp']['temp'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) var2d = var[0].ravel()[self.MASK2] self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = var[::-1].ravel()[self.MASK3] for ipoin in range(self.slf3d.NPOIN2): for iplan in range(self.slf3d.NPLAN - 1, 0, -1): if var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] < -99.0: var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] = var3d[ ipoin + iplan * self.slf3d.NPOIN2] self.slf3d.appendCoreVarsSLF([var3d]) pbar.write(' - temperature', 6 * ibar + 3) pbar.update(6 * ibar + 3) # ~~> VELOCITY U if only2D: var = np.swapaxes( jcope2data['u']['u'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = np.swapaxes( jcope2data['u']['u'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) var2d = var[0].ravel()[self.MASK2] self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = var[::-1].ravel()[self.MASK3] for ipoin in range(self.slf3d.NPOIN2): for iplan in range(self.slf3d.NPLAN - 1, 0, -1): if var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] < -99.0: var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] = var3d[ ipoin + iplan * self.slf3d.NPOIN2] self.slf3d.appendCoreVarsSLF([var3d]) pbar.write(' - u-velocity', 6 * ibar + 4) pbar.update(6 * ibar + 4) # ~~> VELOCITY V if only2D: var = np.swapaxes( jcope2data['v']['v'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = np.swapaxes( jcope2data['v']['v'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) var2d = var[0].ravel()[self.MASK2] self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = var[::-1].ravel()[self.MASK3] for ipoin in range(self.slf3d.NPOIN2): for iplan in range(self.slf3d.NPLAN - 1, 0, -1): if var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] < -99.0: var3d[ipoin + (iplan - 1) * self.slf3d.NPOIN2] = var3d[ ipoin + iplan * self.slf3d.NPOIN2] self.slf3d.appendCoreVarsSLF([var3d]) pbar.write(' - v-velocity', 6 * ibar + 5) pbar.update(6 * 
ibar + 5) # ~~> VELOCITY W if not only2D: var3d = 0. * var3d self.slf3d.appendCoreVarsSLF([var3d]) ibar += 1 pbar.finish() if not only2D: self.slf3d.fole['hook'].close() self.slf2d.fole['hook'].close() def __del__(self): pass
import matplotlib matplotlib.use("Agg") import matplotlib.pyplot as plt import matplotlib.animation as animation from pyproj import Proj, transform import shutil import re mainDir = 'C:/windFarm/' f = mainDir + 'wind_farm_mesh_02.slf' newScan = scanSELAFIN(f) newScan.printHeader() slf = SELAFIN(f) xmin = np.min(slf.MESHX) xmax = np.max(slf.MESHX) ymin = np.min(slf.MESHY) ymax = np.max(slf.MESHY) elements = np.dstack((slf.MESHX[slf.IKLE2], slf.MESHY[slf.IKLE2])) mesh = np.array(slf.IKLE2) x = slf.MESHX y = slf.MESHY print(elements.shape) print(str(len(elements[:, 0, 0]))) nelem = len(elements[:, 0, 0])
def copyCommonData(self): SLFn = SELAFIN('') # Meta data SLFn.TITLE = self.slf.TITLE SLFn.file = self.slf.file SLFn.IPARAM = self.slf.IPARAM # Time SLFn.DATETIME = self.slf.DATETIME SLFn.tags = self.slf.tags # Variables SLFn.NBV1 = self.slf.NBV1 SLFn.VARNAMES = self.slf.VARNAMES SLFn.VARUNITS = self.slf.VARUNITS SLFn.NBV2 = self.slf.NBV2 SLFn.CLDNAMES = self.slf.CLDNAMES SLFn.CLDUNITS = self.slf.CLDUNITS SLFn.NVAR = self.slf.NVAR SLFn.VARINDEX = range(self.slf.NVAR) # Unchanged numbers SLFn.NPLAN = self.slf.NPLAN SLFn.NDP2 = self.slf.NDP2 SLFn.NDP3 = self.slf.NDP3 return SLFn
def __init__(self, fname, vals=(None, None)): # ~~> empty SELAFIN SELAFIN.__init__(self, '') self.DATETIME = [] # ~~> variables self.TITLE = '' self.NBV1 = 1 # bathymetry only self.NVAR = self.NBV1 self.VARINDEX = range(self.NVAR) self.VARNAMES = ['BOTTOM '] self.VARUNITS = ['M '] print ' +> header' # ~~> load header (ASC type) gebcofile = open(fname, 'r') # ~~ gline = [] gline.append(gebcofile.readline().split()) if gline[-1][0] == "ncols": NX1D = int(gline[-1][1]) else: print '.. Could not read this file format. Key ncols expected here.' sys.exit(1) gline.append(gebcofile.readline().split()) if gline[-1][0] == "nrows": NY1D = int(gline[-1][1]) else: print '.. Could not read this file format. Key nrows expected here.' sys.exit(1) gline.append(gebcofile.readline().split()) if gline[-1][0] == "xllcorner": xllcorner = np.float(gline[-1][1]) else: print '.. Could not read this file format. Key xllcorner expected here.' sys.exit(1) gline.append(gebcofile.readline().split()) if gline[-1][0] == "yllcorner": yllcorner = np.float(gline[-1][1]) else: print '.. Could not read this file format. Key yllcorner expected here.' sys.exit(1) gline.append(gebcofile.readline().split()) if gline[-1][0] == "cellsize": xdim = np.float(gline[-1][1]) ydim = xdim elif gline[-1][0] in ["xdim", "dx"]: xdim = np.float(gline[-1][1]) gline.append(gebcofile.readline().split()) if gline[-1][0] in ["ydim", "dy"]: ydim = np.float(gline[-1][1]) else: print '.. Could not read this file format. Key ydim expected here.' sys.exit(1) else: print '.. Could not read this file format. Key cellsize or xdim expected here.' sys.exit(1) gline.append(gebcofile.readline().split()) if gline[-1][0] == "NODATA_value": NODATA_value = int(gline[-1][1]) else: print '.. Could not read this file format. Key NODATA_value expected here.' 
sys.exit(1) # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ gebcofile.close() print ' +> bathymetry' # ~~> load ASCII content, ignoring the header lines z = np.loadtxt(fname, skiprows=len(gline)).T.ravel() print ' +> filtered connectivity' # ~~> temporary IKLE aval = min(z) - 1 if vals[0] != None: aval = float(vals[0]) bval = max(z) + 1 if vals[1] != None: bval = float(vals[1]) ielem = 0 pbar = ProgressBar(maxval=2 * (NX1D - 1) * (NY1D - 1)).start() ikle3 = -np.ones((2 * (NX1D - 1) * (NY1D - 1), 3), dtype=np.int) for i in range(1, NX1D): for j in range(1, NY1D): ipoin = (i - 1) * NY1D + j - 1 # ~~> first triangle if ( aval < z[ipoin] < bval ) and \ ( aval < z[ipoin + NY1D] < bval ) and \ ( aval < z[ipoin + 1] < bval ): ikle3[ielem] = [ipoin, ipoin + 1, ipoin + NY1D] ielem = ielem + 1 pbar.update(ielem) # ~~> second triangle if ( aval < z[ipoin + NY1D] < bval ) and \ ( aval < z[ipoin + NY1D + 1] < bval ) and \ ( aval < z[ipoin + 1] < bval ): ikle3[ielem] = [ipoin + NY1D, ipoin + 1, ipoin + NY1D + 1] ielem = ielem + 1 pbar.update(ielem) pbar.finish() print ' +> renumbered connectivity' # ~~> intermediate connectivity GIKLE = ikle3[np.not_equal(*(np.sort(ikle3).T[0::2]))] KNOLG = np.unique(np.ravel(GIKLE)) KNOGL = dict(zip(KNOLG, range(len(KNOLG)))) # ~~> final connectivity self.IKLE3 = -np.ones_like(GIKLE, dtype=np.int) pbar = ProgressBar(maxval=len(GIKLE)).start() for k in range(len(GIKLE)): self.IKLE3[k] = [ KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]] ] pbar.update(k) pbar.finish() print ' +> mesh x,y,z' # ~~> defines grid x = xllcorner + xdim * np.arange(NX1D, dtype=np.float) - xdim / 2. y = yllcorner - ydim * np.arange( NY1D, dtype=np.float) + ydim * NY1D - ydim / 2. 
self.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel()[KNOLG] self.MESHY = np.tile(y, NX1D)[KNOLG] self.z = z[KNOLG] print ' +> sizes' # ~~> sizes self.NPLAN = 1 self.NDP2 = 3 self.NDP3 = self.NDP2 self.NPOIN2 = len(self.MESHX) self.NPOIN3 = self.NPOIN2 self.NELEM2 = len(self.IKLE3) self.NELEM3 = self.NELEM2 self.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0] print ' +> boundaries' # ~~> establish neighborhood neighbours = Triangulation( self.MESHX, self.MESHY, self.IKLE3).get_cpp_triangulation().get_neighbors() # ~~> build the enssemble of boundary segments ebounds = [] print ' - identify' pbar = ProgressBar(maxval=self.NELEM3).start() for i in range(self.NELEM3): if neighbours[i, 0] < 0: ebounds.append([self.IKLE3[i][0], self.IKLE3[i][1]]) if neighbours[i, 1] < 0: ebounds.append([self.IKLE3[i][1], self.IKLE3[i][2]]) if neighbours[i, 2] < 0: ebounds.append([self.IKLE3[i][2], self.IKLE3[i][0]]) pbar.update(i) pbar.finish() # ~~> assemble the enssemble of boundary segments print ' - assemble' pbounds = polygons.joinSegments(ebounds) # ~~> define IPOBO from an arbitrary start point print ' - set' self.IPOB3 = np.zeros(self.NPOIN3, dtype=np.int) iptfr = 0 for p in pbounds: for n in p[1:]: iptfr += 1 self.IPOB3[n] = iptfr self.IPOB2 = self.IPOB3
b = np.subtract(b, 1) # to create the output file #fout = open("junk.out","w") #for i in range(len(b)): # fout.write(str(b[i]) + '\n') # now we can delete the temp file os.remove(temp_nodes_file) os.remove(temp_elements_file) os.remove(temp_boundaries_file) # now to write the SELAFIN mesh file slf2d = SELAFIN('') #print ' +> Set SELAFIN variables' slf2d.TITLE = 'Converted from gmsh' slf2d.NBV1 = 1 slf2d.NVAR = 1 slf2d.VARINDEX = range(slf2d.NVAR) slf2d.VARNAMES.append('BOTTOM ') slf2d.VARUNITS.append('M ') #print ' +> Set SELAFIN sizes' slf2d.NPLAN = 1 slf2d.NDP2 = 3 slf2d.NDP3 = 3 slf2d.NPOIN2 = n slf2d.NPOIN3 = n
A script to map weather type data contained into a SELAFIN, onto a spatially and time varying SELAFIN file of your choosing (your MESH). ''')) parser.add_argument("args", default='', nargs=3) options = parser.parse_args() # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ slf new mesh ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ geoFile = options.args[0] if not path.exists(geoFile): print '... the provided geoFile does not seem to exist: ' + geoFile + '\n\n' sys.exit(1) # Find corresponding (x,y) in corresponding new mesh print ' +> getting hold of the GEO file' geo = SELAFIN(geoFile) xys = np.vstack((geo.MESHX, geo.MESHY)).T # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ slf existing res ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ slfFile = options.args[1] if not path.exists(slfFile): print '... the provided slfFile does not seem to exist: ' + slfFile + '\n\n' sys.exit(1) slf = SELAFIN(slfFile) slf.setKDTree() slf.setMPLTri() print ' +> support extraction' # Extract triangles and weights in 2D support2d = []
class HYCOM(): def __init__(self, dates): # ~~~~ Initialisation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ self.moddates = [datetime(*dates[0]), datetime(*dates[1])] # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print ' +> Extract HYCOM time records\n' hycomurls = [ \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.2', \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.1', \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.0', \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.9', \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.8', \ 'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.6' ] self.experiments = [] for hycomurl in hycomurls: success = False while not success: try: success = True hycomdata = open_url(hycomurl) NIT = hycomdata['Date'].shape[0] print ' x ' + str( NIT) + ' records from ' + hycomurl, ITs = [] ATs = [] z = zip(hycomdata['Date'][0:NIT], range(NIT)) except: success = False print ' ... re-attempting ' for hycomdate, itime in z: d = datetime(int(str(hycomdate)[0:4]), int(str(hycomdate)[4:6]), int(str(hycomdate)[6:8])) if itime == 0: print ' from: ', str(d), if itime == NIT - 1: print ' to: ', str(d) if self.moddates[0] <= d and d <= self.moddates[1]: ITs.append(itime) ATs.append(d) if ITs != []: self.experiments.append((hycomdata, NIT, ITs, ATs, hycomurl)) print '\n' def getHeaderHYCOM(self, bounds): # ~~> inheritence self.slf3d = SELAFIN('') # slf3d self.slf2d = SELAFIN('') # slf2d surface print ' +> Set SELAFIN Variables' self.slf3d.TITLE = '' self.slf3d.NBV1 = 6 self.slf3d.NVAR = 6 self.slf3d.VARINDEX = range(self.slf3d.NVAR) self.slf3d.VARNAMES = ['ELEVATION Z ', \ 'SALINITY ','TEMPERATURE ', \ 'VELOCITY U ','VELOCITY V ','VELOCITY W '] self.slf3d.VARUNITS = ['M ', \ 'G/L ','DEGREES ', \ 'M/S ','M/S ','M/S '] self.slf2d.TITLE = self.slf3d.TITLE self.slf2d.NBV1 = self.slf3d.NBV1 + 1 self.slf2d.NVAR = self.slf3d.NVAR + 1 self.slf2d.VARINDEX = range(self.slf2d.NVAR) self.slf2d.VARNAMES = 
self.slf3d.VARNAMES[0:-1] self.slf2d.VARNAMES.append('EMP ') self.slf2d.VARNAMES.append('QTOT ') self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1] self.slf2d.VARUNITS.append('??? ') self.slf2d.VARUNITS.append('??? ') # ~~> server access, # get the grid and header from the latest experiment self.hycomdata = self.experiments[0][0] # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ success = False while not success: try: success = True # ~~> the whole of the 2D grid sizes print ' +> Extract HYCOM sizes' NX1D = self.hycomdata['X'].shape[0] NY1D = self.hycomdata['Y'].shape[0] print ' +> Extract HYCOM mesh' lonX1D = self.hycomdata['Longitude']['Longitude'].data[ 0, 0:NX1D].ravel() % 360 latY1D = self.hycomdata['Latitude']['Latitude'].data[ 0:NY1D, 0].ravel() except: success = False print ' ... re-attempting ' # ~~> lat,lon correction for i in range(NX1D): if (lonX1D[i] > 180): lonX1D[i] = lonX1D[i] - 360.0 for i in range(2172, NY1D): latY1D[i] = 47.0 + (i - 2172) / 18.0 # ~~> subset for the SELAFIN print ' +> Set SELAFIN mesh' self.hycomilon = np.where( (lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0] self.hycomilat = np.where( (latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0] x = lonX1D[self.hycomilon] y = latY1D[self.hycomilat] NX1D = len(x) NY1D = len(y) # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # ~~> 3D success = False while not success: try: success = True print ' +> Set SELAFIN sizes' self.slf3d.NPLAN = self.hycomdata['Depth'].shape[0] self.ZPLAN = self.hycomdata['Depth'][ 0:self.slf3d.NPLAN][::-1] # I do not know any other way except: success = False print ' ... 
re-attempting ' self.slf3d.NDP2 = 3 self.slf3d.NDP3 = 6 self.slf3d.NPOIN2 = NX1D * NY1D self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1) self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1) self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0] # ~~> 2D self.slf2d.NPLAN = 1 self.slf2d.NDP2 = self.slf3d.NDP2 self.slf2d.NDP3 = self.slf2d.NDP2 self.slf2d.NPOIN2 = self.slf3d.NPOIN2 self.slf2d.NPOIN3 = self.slf2d.NPOIN2 self.slf2d.NELEM2 = self.slf3d.NELEM2 self.slf2d.NELEM3 = self.slf2d.NELEM2 self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0] print ' +> Set SELAFIN mesh' self.slf3d.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel() self.slf3d.MESHY = np.tile(y, NX1D) self.slf2d.MESHX = self.slf3d.MESHX[0:self.slf2d.NPOIN2] self.slf2d.MESHY = self.slf3d.MESHY[0:self.slf2d.NPOIN2] # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print ' +> Set SELAFIN IKLE' ielem = 0 pbar = ProgressBar(maxval=self.slf3d.NELEM3).start() self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3), dtype=np.int) for k in range(1, self.slf3d.NPLAN): for i in range(1, NX1D): for j in range(1, NY1D): ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2 # ~~> first prism self.slf3d.IKLE3[ielem][0] = ipoin self.slf3d.IKLE3[ielem][1] = ipoin + NY1D self.slf3d.IKLE3[ielem][2] = ipoin + 1 self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2 self.slf3d.IKLE3[ielem][ 4] = ipoin + NY1D + self.slf3d.NPOIN2 self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2 ielem = ielem + 1 pbar.update(ielem) # ~~> second prism self.slf3d.IKLE3[ielem][0] = ipoin + NY1D self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1 self.slf3d.IKLE3[ielem][2] = ipoin + 1 self.slf3d.IKLE3[ielem][ 3] = ipoin + NY1D + self.slf3d.NPOIN2 self.slf3d.IKLE3[ielem][ 4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2 self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2 ielem = ielem + 1 pbar.update(ielem) pbar.finish() 
self.slf2d.IKLE3 = np.compress( np.repeat([True, False], self.slf2d.NDP2), self.slf3d.IKLE3[0:self.slf3d.NELEM2], axis=1) #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2)) # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print ' +> Set SELAFIN IPOBO' pbar = ProgressBar(maxval=NX1D + NY1D).start() self.slf3d.IPOB3 = np.zeros(self.slf3d.NPOIN3, dtype=np.int) # ~~> along the x-axis (lon) for i in range(NX1D): for k in range(1, self.slf3d.NPLAN + 1): ipoin = i * NY1D + (k - 1) * (2 * NX1D + 2 * NY1D - 4) self.slf3d.IPOB3[ipoin] = i + 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4) ipoin = i * NY1D - 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4) self.slf3d.IPOB3[ipoin] = 2 * NX1D + ( NY1D - 2) - i + (k - 1) * (2 * NX1D + 2 * NY1D - 4) pbar.update(i) # ~~> along the y-axis (alt) for i in range(1, NY1D): for k in range(1, self.slf3d.NPLAN + 1): ipoin = i + (k - 1) * (2 * NX1D + 2 * NY1D - 4) self.slf3d.IPOB3[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1 + ( k - 1) * (2 * NX1D + 2 * NY1D - 4) ipoin = NY1D * (NX1D - 1) + i + (k - 1) * (2 * NX1D + 2 * NY1D - 4) self.slf3d.IPOB3[ipoin] = NX1D + i + (k - 1) * (2 * NX1D + 2 * NY1D - 4) pbar.update(i + NX1D) pbar.finish() self.slf2d.IPOB3 = self.slf3d.IPOB3[0:self.slf3d.NPOIN2] def putContent(self, rootName, only2D): nbar = 0 for e in self.experiments: nbar += len(e[2]) ilat = [self.hycomilat[0], self.hycomilat[-1] + 1] ilon = [self.hycomilon[0], self.hycomilon[-1] + 1] # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print ' +> Extract HYCOM time records' self.slf3d.tags = {'times': []} # ~~~~ Start Date and Time ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ self.slf3d.tags['times'] = 86400.0 * np.arange(nbar) self.slf2d.tags = {'times': self.slf3d.tags['times']} self.slf3d.DATETIME = self.experiments[-1][3][0].timetuple()[0:6] self.slf2d.DATETIME = self.slf3d.DATETIME self.slf3d.IPARAM[9] = 1 self.slf2d.IPARAM[9] = 1 #a = np.arange(40).reshape(5,8)[::-1].ravel() # 5 plans, 8 points print ' +> Write SELAFIN headers' if not 
only2D: self.slf3d.fole = {} self.slf3d.fole.update({'hook': open('t3d_' + rootName, 'wb')}) self.slf3d.fole.update({'name': 't3d_' + rootName}) self.slf3d.fole.update({'endian': ">"}) # big endian self.slf3d.fole.update({'float': ('f', 4)}) # single precision self.slf3d.appendHeaderSLF() self.slf2d.fole = {} self.slf2d.fole.update({'hook': open('t2d_' + rootName, 'wb')}) self.slf2d.fole.update({'name': 't2d_' + rootName}) self.slf2d.fole.update({'endian': ">"}) # big endian self.slf2d.fole.update({'float': ('f', 4)}) # single precision self.slf2d.appendHeaderSLF() # ~~~~ Time loop(s) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ var3d = np.zeros(self.slf3d.NPOIN3, dtype=np.float) var2d = np.zeros(self.slf2d.NPOIN3, dtype=np.float) print ' +> Write SELAFIN cores' ibar = 0 pbar = ProgressBar(maxval=10 * nbar).start() for e in self.experiments[::-1]: hycomdata = e[0] i1 = min(e[2]) i2 = max(e[2]) + 1 for t in range(i1, i2): # ~~> time stamp pbar.write(' x ' + str(e[3][t - i1]), 10 * ibar + 0) pbar.update(10 * ibar + 0) if not only2D: self.slf3d.appendCoreTimeSLF(ibar) self.slf2d.appendCoreTimeSLF(ibar) # ~~> HYCOM variable extraction ( 1L:times, 33L:layers, yyL:NY1D, xxL:NX1D ) # ~~> ELEVATION success = False while not success: try: success = True pbar.write(' - ssh', 10 * ibar + 1) v2d = np.swapaxes( hycomdata['ssh']['ssh'].data[t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 0, 1).ravel() except: success = False pbar.write( ' ... 
re-attempting because I failed ...', 10 * ibar) var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: var3d = -np.tile(self.ZPLAN, self.slf3d.NPOIN2).reshape( self.slf3d.NPOIN2, self.slf3d.NPLAN).T.ravel() var3d[self.slf3d.NPOIN3 - self.slf3d.NPOIN2:] = var2d self.slf3d.appendCoreVarsSLF([var3d]) pbar.update(10 * ibar + 1) # ~~> SALINITY success = False while not success: try: success = True pbar.write(' - surface_salinity_trend', 10 * ibar + 2) v2d = np.swapaxes( hycomdata['surface_salinity_trend'] ['surface_salinity_trend'].data[ t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 0, 1).ravel() except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) pbar.update(10 * ibar + 2) if not only2D: success = False while not success: try: success = True pbar.write(' - salinity', 10 * ibar + 3) var = np.swapaxes( hycomdata['salinity']['salinity'].data[ t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) v3d = var[::-1].ravel() var3d = np.where(v3d < 10000, v3d, 0.0) self.slf3d.appendCoreVarsSLF([var3d]) pbar.update(10 * ibar + 3) # ~~> TEMPERATURE success = False while not success: try: success = True pbar.write(' - surface_temperature_trend', 10 * ibar + 4) v2d = np.swapaxes( hycomdata['surface_temperature_trend'] ['surface_temperature_trend'].data[ t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 0, 1).ravel() except: success = False pbar.write( ' ... 
re-attempting because I failed ...', 10 * ibar) var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) pbar.update(10 * ibar + 4) if not only2D: success = False while not success: try: success = True pbar.write(' - temperature', 10 * ibar + 5) var = np.swapaxes( hycomdata['temperature']['temperature'].data[ t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) v3d = var[::-1].ravel() var3d = np.where(v3d < 10000, v3d, 0.0) self.slf3d.appendCoreVarsSLF([var3d]) pbar.update(10 * ibar + 5) # ~~> VELOCITY U success = False while not success: try: success = True pbar.write(' - u-velocity', 10 * ibar + 6) if only2D: var = np.swapaxes( hycomdata['u']['u'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = np.swapaxes( hycomdata['u']['u'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) v2d = var[0].ravel() var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: v3d = var[::-1].ravel() var3d = np.where(v3d < 10000, v3d, 0.0) self.slf3d.appendCoreVarsSLF([var3d]) pbar.update(10 * ibar + 6) # ~~> VELOCITY V success = False while not success: try: success = True pbar.write(' - v-velocity', 10 * ibar + 7) if only2D: var = np.swapaxes( hycomdata['v']['v'].data[t, 0:1, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) else: var = np.swapaxes( hycomdata['v']['v'].data[t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 1, 2) except: success = False pbar.write( ' ... 
re-attempting because I failed ...', 10 * ibar) v2d = var[0].ravel() var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) if not only2D: v3d = var[::-1].ravel() var3d = np.where(v3d < 10000, v3d, 0.0) self.slf3d.appendCoreVarsSLF([var3d]) pbar.update(10 * ibar + 7) # ~~> VELOCITY W if not only2D: var3d = 0. * var3d self.slf3d.appendCoreVarsSLF([var3d]) # ~~> EMP ??? success = False while not success: try: success = True pbar.write(' - emp', 10 * ibar + 8) v2d = np.swapaxes( hycomdata['emp']['emp'].data[t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 0, 1).ravel() except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) pbar.update(10 * ibar + 8) # ~~> TEMPERATURE success = False while not success: try: success = True pbar.write(' - qtot', 10 * ibar + 9) v2d = np.swapaxes( hycomdata['qtot']['qtot'].data[t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0], 0, 1).ravel() except: success = False pbar.write( ' ... re-attempting because I failed ...', 10 * ibar) var2d = np.where(v2d < 10000, v2d, 0.0) self.slf2d.appendCoreVarsSLF([var2d]) pbar.update(10 * ibar + 9) ibar += 1 pbar.finish() if not only2D: self.slf3d.fole['hook'].close() self.slf2d.fole['hook'].close() def __del__(self): pass
class ECMWF():
    """Retrieve an ECMWF dataset through the ECMWF Data Server API and
    convert it to a 2D atmospheric SELAFIN file (sp/u10/v10/t2m).

    NOTE(review): relies on module-level names defined elsewhere in this
    file: config (email/key/url), Connection, urllib2, netcdf, ProgressBar.
    """

    def __init__(self, dataset, dates, request):
        # ~~> inheritence
        self.slf2d = SELAFIN('')  # surface
        self.slf2d.DATETIME = dates[0]
        # ~> Initialisation
        self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
        status = ''
        self.request = request
        # ~> Establish connection
        self.connection = Connection(config['email'],
                                     config['key'],
                                     quiet=True,
                                     verbose=False)
        # ~> Verify connection
        user = self.connection.call("%s/%s" % (config['url'], "who-am-i"))
        print ' ~> access through username: %s\n' % (
            user["full_name"] or "user '%s'" % user["uid"], )
        # ~> Request dataset (processed remotely by ECMWF)
        self.connection.submit("%s/%s/requests" % (config['url'], dataset),
                               request)
        status = self.connection.status
        print ' ~> request has been', status
        # ~> Wait for remote processing; report only on status changes
        while not self.connection.ready():
            if status != self.connection.status:
                status = self.connection.status
                print ' ~> request remains', status, '...'
            self.connection.wait()
        # ~> Request completed
        print ' ~> request is now', self.connection.status
        self.connection.cleanup()

    def downloadECMWF(self):
        """Download the prepared result to self.request['target'],
        retrying up to 3 times if the byte count comes up short."""
        result = self.connection.result()
        fileName = self.request.get("target")
        # ~> tries connecting 3 times before stopping
        tries = 0
        while True:
            # ~> downloading file by blocks of 1 MiB
            http = urllib2.urlopen(result["href"])
            f = open(fileName, "wb")
            ibar = 0
            pbar = ProgressBar(maxval=result["size"]).start()
            while True:
                chunk = http.read(1024 * 1024)
                if not chunk:
                    break
                f.write(chunk)
                ibar += len(chunk)
                pbar.update(ibar)
            f.flush()
            f.close()
            pbar.finish()
            # ~> have I got everything ?
            if ibar == result["size"]:
                break
            if tries == 3:
                print " ... exhausted the number of download trials.\nYou may wish to attempt this again later."
                sys.exit()
            print " ... trying to download the data once more ..."
            tries += 1

    def appendHeaderECMWF(self, ecmwfdata):
        """Build the SELAFIN header (variables, mesh, connectivity,
        boundaries, time tags) from the downloaded netcdf and write it."""
        # ~~> variables
        self.slf2d.TITLE = ''
        self.slf2d.NBV1 = len(
            ecmwfdata.variables) - 3  # less longitude, latitude and time
        self.slf2d.NVAR = self.slf2d.NBV1
        self.slf2d.VARINDEX = range(self.slf2d.NVAR)
        # NOTE(review): names hard-coded for exactly 4 variables while NBV1
        # is computed from the file -- assumes the netcdf holds exactly
        # sp/u10/v10/t2m; confirm against the request.
        self.slf2d.VARNAMES = ['SURFACE PRESSURE', \
            'WIND VELOCITY U ','WIND VELOCITY V ', \
            'AIR TEMPERATURE ']
        self.slf2d.VARUNITS = ['UI ', \
            'M/S ','M/S ', \
            'DEGREES ']
        # ~~> 2D grid taken straight from the netcdf axes
        x = ecmwfdata.variables['longitude'][:]
        NX1D = len(x)
        y = ecmwfdata.variables['latitude'][:]
        NY1D = len(y)
        self.slf2d.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel()
        self.slf2d.MESHY = np.tile(y, NX1D)
        # ~~> lat,lon correction: wrap longitudes into [-180, 180]
        for i in range(NX1D):
            if (self.slf2d.MESHX[i] > 180):
                self.slf2d.MESHX[i] = self.slf2d.MESHX[i] - 360.0
        #for i in range(2172,NY1D):
        #   self.slf2d.MESHY[i] = 47.0 + ( i-2172 )/18.0
        # 2D mesh sizes: one plane, two triangles per grid cell.
        self.slf2d.NPLAN = 1
        self.slf2d.NDP2 = 3
        self.slf2d.NDP3 = self.slf2d.NDP2
        self.slf2d.NPOIN2 = NX1D * NY1D
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        self.slf2d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        # ~~> Connectivity
        ielem = 0
        pbar = ProgressBar(maxval=self.slf2d.NELEM3).start()
        self.slf2d.IKLE3 = np.zeros((self.slf2d.NELEM3, self.slf2d.NDP3),
                                    dtype=np.int)
        for i in range(1, NX1D):
            for j in range(1, NY1D):
                ipoin = (i - 1) * NY1D + j - 1
                # ~~> first triangle
                self.slf2d.IKLE3[ielem][0] = ipoin
                self.slf2d.IKLE3[ielem][1] = ipoin + NY1D
                self.slf2d.IKLE3[ielem][2] = ipoin + 1
                ielem = ielem + 1
                pbar.update(ielem)
                # ~~> second triangle
                self.slf2d.IKLE3[ielem][0] = ipoin + NY1D
                self.slf2d.IKLE3[ielem][1] = ipoin + NY1D + 1
                self.slf2d.IKLE3[ielem][2] = ipoin + 1
                ielem = ielem + 1
                pbar.update(ielem)
        pbar.finish()
        # ~~> Boundaries: number the rim of the rectangular grid
        pbar = ProgressBar(maxval=NX1D + NY1D).start()
        self.slf2d.IPOB3 = np.zeros(self.slf2d.NPOIN3, dtype=np.int)
        # ~~> along the x-axis (lon)
        for i in range(NX1D):
            ipoin = i * NY1D
            self.slf2d.IPOB3[ipoin] = i + 1
            ipoin = i * NY1D - 1
            self.slf2d.IPOB3[ipoin] = 2 * NX1D + (NY1D - 2) - i
            pbar.update(i)
        # ~~> along the y-axis (alt)
        for i in range(1, NY1D):
            ipoin = i
            self.slf2d.IPOB3[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1
            ipoin = NY1D * (NX1D - 1) + i
            self.slf2d.IPOB3[ipoin] = NX1D + i
            pbar.update(i + NX1D)
        pbar.finish()
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        ATs = ecmwfdata.variables['time'][:]
        self.slf2d.tags = {
            'times': 3600 * (ATs - ATs[0])
        }  # time record in hours
        # self.slf2d.DATETIME = period[0] ... already set
        self.slf2d.appendHeaderSLF()

    def appendCoreTimeECMWF(self, t):
        # Thin wrapper over the SELAFIN time-record writer.
        self.slf2d.appendCoreTimeSLF(t)

    def appendCoreVarsECMWF(self, ecmwfdata, itime):
        """Write one record of sp/u10/v10/t2m, unpacking the netcdf
        scale_factor/add_offset on the fly."""
        # Note: this is how you get to the attributes ...
        # ecmwfdata.variables['sp'].ncattrs()
        # in particular ...
        # ecmwfdata.variables['sp'].units
        # ecmwfdata.variables['sp'].missing_value
        # ~~> SURFACE PRESSURE == 'sp'
        var2d = np.swapaxes(ecmwfdata.variables['sp'][itime][:], 0, 1).ravel()
        varof = ecmwfdata.variables['sp'].add_offset
        varsf = ecmwfdata.variables['sp'].scale_factor
        #print ecmwfdata.variables['sp'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> WIND VELOCITY U == 'u10'
        var2d = np.swapaxes(ecmwfdata.variables['u10'][itime][:], 0,
                            1).ravel()
        varof = ecmwfdata.variables['u10'].add_offset
        varsf = ecmwfdata.variables['u10'].scale_factor
        #print ecmwfdata.variables['u10'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> WIND VELOCITY V == 'v10'
        var2d = np.swapaxes(ecmwfdata.variables['v10'][itime][:], 0,
                            1).ravel()
        varof = ecmwfdata.variables['v10'].add_offset
        varsf = ecmwfdata.variables['v10'].scale_factor
        #print ecmwfdata.variables['v10'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> AIR TEMPERATURE == 't2m'
        var2d = np.swapaxes(ecmwfdata.variables['t2m'][itime][:], 0,
                            1).ravel()
        varof = ecmwfdata.variables['t2m'].add_offset
        varsf = ecmwfdata.variables['t2m'].scale_factor
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof - 273.15
                                      ])  # Kelvin to Celsius

    def putContent(self, fileName, showbar=True):
        """Convert the downloaded netcdf into the SELAFIN file `fileName`."""
        # ~~> netcdf reader
        ecmwfdata = netcdf.netcdf_file(self.request.get("target"), 'r')
        # ~~> new SELAFIN writer
        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open(fileName, 'wb')})
        self.slf2d.fole.update({'name': fileName})
        self.slf2d.fole.update({'endian': ">"})  # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision
        print ' +> Write SELAFIN header'
        self.appendHeaderECMWF(ecmwfdata)
        print ' +> Write SELAFIN core'
        ibar = 0
        if showbar:
            pbar = ProgressBar(maxval=len(self.slf2d.tags['times'])).start()
        for t in range(len(self.slf2d.tags['times'])):
            self.appendCoreTimeECMWF(t)
            self.appendCoreVarsECMWF(ecmwfdata, ibar)
            ibar += 1
            if showbar:
                pbar.update(ibar)
        self.slf2d.fole['hook'].close()
        if showbar:
            pbar.finish()
def copyCommonData(self): SLFn = SELAFIN('') # Meta data SLFn.TITLE = self.slf.TITLE SLFn.file = self.slf.file SLFn.IPARAM = self.slf.IPARAM # Time SLFn.DATETIME = self.slf.DATETIME SLFn.tags = self.slf.tags # Variables SLFn.NBV1 = self.slf.NBV1 SLFn.VARNAMES = self.slf.VARNAMES SLFn.VARUNITS = self.slf.VARUNITS SLFn.NBV2 = self.slf.NBV2 SLFn.CLDNAMES = self.slf.CLDNAMES SLFn.CLDUNITS = self.slf.CLDUNITS SLFn.NVAR = self.slf.NVAR SLFn.VARINDEX = range(self.slf.NVAR) # Unchanged numbers SLFn.NPLAN = self.slf.NPLAN SLFn.NDP = self.slf.NDP return SLFn
def mesh2KML(meshFile, projection):
    # Dump the triangular mesh of a SELAFIN file as a KML file of polygons
    # under the 'static' directory, reprojecting node coordinates from
    # `projection` (a proj4/EPSG init string) to WGS84 when necessary.
    # The KML header is copied from the template static/meshviewer/polyCell.kml.
    if not os.path.isdir('static'):
        os.mkdir('static')
    else:
        # Remove old kml files
        for filename in glob.glob(os.path.join('static', '*.kml')):
            os.remove(filename)
    fname = meshFile[:-3] + 'kml'  # assumes a 3-character extension (e.g. .slf)
    print("fname before split" + fname)
    # NOTE(review): keeps only the third path component -- assumes meshFile
    # always has at least two directory levels; verify against callers.
    fname = fname.split("/")[2]
    #fname = "Mesh_Final_V1.kml"
    print("fname is " + fname)
    myFile = os.path.join('static', fname)
    print(myFile)
    fOut = open(myFile, 'w')
    mainDir = 'static'
    inProj = Proj(init=projection)
    wgs = "epsg:4326"
    outProj = Proj(init=wgs)
    newScan = scanSELAFIN(meshFile)
    newScan.printHeader()
    slf = SELAFIN(meshFile)
    # Mesh bounding box (currently only used by the commented-out filter below).
    xmin = np.min(slf.MESHX)
    xmax = np.max(slf.MESHX)
    ymin = np.min(slf.MESHY)
    ymax = np.max(slf.MESHY)
    # elements[i, j, :] = (x, y) of node j of triangle i
    elements = np.dstack((slf.MESHX[slf.IKLE2], slf.MESHY[slf.IKLE2]))
    mesh = np.array(slf.IKLE2)
    x = slf.MESHX
    y = slf.MESHY
    nelem = len(elements[:, 0, 0])
    inKml = mainDir + "/meshviewer/polyCell.kml"
    fIn = open(inKml, 'r')
    lines = fIn.readlines()
    lcnt = 0
    # Copy the first 47 lines of the template (the fixed KML/Document header
    # with the pushpin style) -- tied to the template's exact layout.
    for line in lines:
        if lcnt < 47:
            fOut.write(line)
        lcnt = lcnt + 1
    cnt = 0
    for i in range(0, nelem):
        x1 = elements[i, 0, 0]
        y1 = elements[i, 0, 1]
        #if x1>=xmin and x1<=xmax and y1>=ymin and y1<=ymax:
        # NOTE(review): always-true condition -- leftover of the bounding-box
        # filter above, so every element is written.
        if i >= 0:
            cnt = cnt + 1
            x2 = elements[i, 1, 0]
            y2 = elements[i, 1, 1]
            x3 = elements[i, 2, 0]
            y3 = elements[i, 2, 1]
            # Reproject to WGS84 unless the mesh is already in it.
            if projection != wgs:
                x1, y1 = transform(inProj, outProj, x1, y1)
                x2, y2 = transform(inProj, outProj, x2, y2)
                x3, y3 = transform(inProj, outProj, x3, y3)
            # Closed ring: first vertex repeated, altitude 0 everywhere.
            poly = (str(x1) + "," + str(y1) + "," + str(0) + " " +
                    str(x2) + "," + str(y2) + "," + str(0) + " " +
                    str(x3) + "," + str(y3) + "," + str(0) + " " +
                    str(x1) + "," + str(y1) + "," + str(0))
            fOut.write("<Placemark>" + "\n")
            fOut.write("<name>polyCell</name>" + "\n")
            fOut.write("<styleUrl>#msn_ylw-pushpin</styleUrl>" + "\n")
            fOut.write("<Polygon>" + "\n")
            fOut.write("<tessellate>1</tessellate>" + "\n")
            fOut.write("<altitudeMode>relativeToGround</altitudeMode>" + "\n")
            fOut.write("<outerBoundaryIs>" + "\n")
            fOut.write("<LinearRing>" + "\n")
            fOut.write("<coordinates>" + "\n")
            fOut.write(poly + "\n")
            fOut.write("</coordinates>" + "\n")
            fOut.write("</LinearRing>" + "\n")
            fOut.write("</outerBoundaryIs>" + "\n")
            fOut.write("</Polygon>" + "\n")
            fOut.write("</Placemark>" + "\n")
    # Close the tags opened by the copied template header.
    fOut.write("</Document>" + "\n")
    fOut.write("</kml>" + "\n")
    fOut.close()
    fIn.close()
def add(self, typl, what): Caster.add(self, typl, what) # ~~> output from for 2D file if self.obtype == 'slf': #self.obdump.add(self.object[what['file']]) cast = self.get(typl, what) support = cast.support values = cast.values if len(support) != 3: print '... not enough information to save as 2d variable' sys.exit(1) obj = self.object[what['file']] # ~~ SELAFIN header ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if not self.oudata: self.oudata = SELAFIN('') # create the out header self.oudata.TITLE = '' # TODO: pass it on from what and deco self.oudata.NBV1 = 0 self.oudata.VARNAMES = [] self.oudata.VARUNITS = [] self.oudata.IPARAM = obj.IPARAM self.oudata.IPARAM[6] = 1 # 3D being forced to 2D self.oudata.NDP2 = len(support[2][0]) if np.all([obj.IKLE2, support[2]]): self.oudata.IKLE2 = support[3] self.oudata.IPOB2 = np.zeros(len(supoort[0]), dtype=np.int) self.oudata.MESHX = support[0] self.oudata.MESHY = support[1] else: self.oudata.IKLE2 = obj.IKLE2 self.oudata.IPOB2 = obj.IPOB2 # IPOBO missing from support self.oudata.MESHX = obj.MESHX self.oudata.MESHY = obj.MESHY self.oudata.NELEM2 = len(self.oudata.IKLE2) self.oudata.NPOIN2 = len(self.oudata.MESHX) self.oudata.NELEM3 = self.oudata.NELEM2 self.oudata.NPOIN3 = self.oudata.NPOIN2 self.oudata.NDP3 = self.oudata.NDP2 self.oudata.NPLAN = 1 vars, vtypes = whatVarsSLF(what['vars'], obj.VARNAMES) self.oudata.NBV1 = self.oudata.NBV1 + len(vars[0]) self.oudata.NBV2 = 0 self.oudata.NVAR = self.oudata.NBV1 + self.oudata.NBV2 self.oudata.CLDNAMES = [] self.oudata.CLDUNITS = [] self.oudata.VARINDEX = range(self.oudata.NVAR) for ivar, ival in zip(vars[0], range(len(vars[0]))): self.oudata.VARNAMES.append(obj.VARNAMES[ivar]) self.oudata.VARUNITS.append(obj.VARUNITS[ivar]) self.obdata.update({obj.VARNAMES[ivar]: [values[ival]]}) if max(self.oudata.IPARAM[9], obj.IPARAM[9]) > 0: if self.oudata.DATETIME != obj.DATETIME: self.oudata.IPARAM[9] = 0 if self.oudata.NELEM2 != obj.NELEM2 or self.oudata.NPOIN2 != obj.NPOIN2: print '... 
mismatch between the 2D sizes of layers of a same save2d object ' sys.exit(1) self.oudata.IKLE3 = self.oudata.IKLE2 self.oudata.IPOB3 = self.oudata.IPOB2 # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1)
    def __init__(self, fileName):
        # Read a Gmsh MSH (ASCII, version 2.2) file and populate this object
        # as a 2D SELAFIN mesh: nodes -> MESHX/Y/Z, 2-node-tagged elements
        # -> IKLE2, plus the derived 3D sizes and a trivial IPOBO.
        # ~~> empty SELAFIN
        SELAFIN.__init__(self, '')
        self.DATETIME = []
        # ~~> variables: a single placeholder BOTTOM variable
        self.TITLE = ''
        self.NBV1 = 1
        self.NVAR = self.NBV1
        self.VARINDEX = range(self.NVAR)
        self.VARNAMES = ['BOTTOM ']
        self.VARUNITS = ['M ']
        # ~~ Openning files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.file = {}
        self.file.update({'name': fileName})
        self.file.update({'endian': ">"})  # "<" means little-endian, ">" means big-endian
        self.file.update({'integer': ('i', 4)})  #'i' size 4
        self.file.update({'float': ('f', 4)})  #'f' size 4, 'd' = size 8
        self.file.update({'hook': open(fileName, 'rt')})
        # NOTE(review): 'file' shadows the Python builtin; kept as-is.
        file = iter(self.file['hook'])
        # ~~ Read/Write dimensions ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Note:
        # The section MeshFormat is mandatory
        line = file.next()
        proc = re.match(self.frst_keys, line)
        if proc:
            if proc.group('key') != "MeshFormat":
                print '... Could not recognise your MSH file format. Missing MeshFormat key.'
                sys.exit(1)
        line = file.next().split()
        if line[0] != "2.2":
            print '... Could not read your MSH file format. Only the version 2.2 is allowed.'
            sys.exit(1)
        fileType = int(line[1])  # 0 = ASCII, 1 = binary per the MSH spec
        if fileType == 1:
            print '... I have never done this before. Do check it works'
            line = file.next()
            # NOTE(review): this binary branch looks broken -- 'line' is a
            # str (no .read method) and '>i' unpacks a single value into
            # three names; it would raise before use.  TODO confirm and fix.
            l, isize, chk = unpack('>i', line.read(4 + 4 + 4))
        floatSize = int(line[2])
        if floatSize == 8:
            self.file['float'] = ('d', 8)
        line = file.next()
        proc = re.match(self.last_keys, line)
        if proc:
            if proc.group('key') != "MeshFormat":
                print '... Could not complete reading the header of you MSH file format. Missing EndMeshFormat key.'
                sys.exit(1)
        # ~~ Loop on sections ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        while True:
            # Next section header; EOF terminates the loop.
            try:
                line = file.next()
            except:
                break
            proc = re.match(self.frst_keys, line)
            if not proc:
                print '... Was expecting a new Section starter. Found this instead: ', line
                sys.exit(1)
            key = proc.group('key')
            # ~~ Section Nodes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            if key == "Nodes":
                print ' +> mesh x,y,z'
                NPOIN = int(file.next())
                # Allocate at the precision announced in the header.
                if self.file['float'][0] == 'd':
                    MESHX = np.zeros(NPOIN, dtype=np.float64)
                    MESHY = np.zeros(NPOIN, dtype=np.float64)
                    MESHZ = np.zeros(NPOIN, dtype=np.float64)
                else:
                    MESHX = np.zeros(NPOIN, dtype=np.float)
                    MESHY = np.zeros(NPOIN, dtype=np.float)
                    MESHZ = np.zeros(NPOIN, dtype=np.float)
                #map_nodes = []
                for i in range(NPOIN):
                    # MSH node line: node-number x y z
                    line = file.next().split()
                    #map_nodes.append(int(line[0]))
                    MESHX[i] = np.float(line[1])
                    MESHY[i] = np.float(line[2])
                    MESHZ[i] = np.float(line[3])
                # TODO: renumbering nodes according to map_nodes ?
                #map_nodes = np.asarray(map_nodes)
                self.NPOIN2 = NPOIN
                self.MESHX = MESHX
                self.MESHY = MESHY
                self.MESHZ = MESHZ
                line = file.next()  # consume the $EndNodes line
            # ~~ Section Elements ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            elif proc.group('key') == "Elements":
                print ' +> renumbered connectivity'
                NELEM = int(file.next())
                IKLE2 = -np.ones((NELEM, 3), dtype=np.int)
                for i in range(NELEM):
                    # MSH element line: number type ntags tag... node-list;
                    # only type 2 (3-node triangle) is kept.
                    line = file.next().split()
                    if int(line[1]) != 2:
                        continue
                    e = line[int(line[2]) + 3:]  # skip the tags
                    IKLE2[i] = [np.int(e[0]), np.int(e[1]), np.int(e[2])]
                # Drop the untouched (-1,-1,-1) rows and switch to 0-based.
                self.IKLE2 = IKLE2[np.not_equal(*(np.sort(IKLE2).T[0::2]))] - 1
                self.NELEM2 = len(self.IKLE2)
                line = file.next()  # consume the $EndElements line
                # TODO: fitting the unique node numbers with map_nodes ?
            # ~~ Unnecessary section ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
            else:
                # Skip the section up to its $End... marker.
                while True:
                    line = file.next()
                    if re.match(self.last_keys, line):
                        break
            proc = re.match(self.last_keys, line)
            if proc:
                if proc.group('key') != key:
                    print '... Could not complete reading the header of you MSH file format. Missing ', key, ' end key.'
                    sys.exit(1)
        # ~~> sizes: single-plane 2D mesh, so 3D numbers mirror the 2D ones
        print ' +> sizes'
        self.NDP3 = 3
        self.NDP2 = 3
        self.NPLAN = 1
        self.NELEM3 = self.NELEM2
        self.NPOIN3 = self.NPOIN2
        self.IKLE3 = self.IKLE2
        self.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        print ' +> boundaries'
        # ~~> establish neighborhood
        neighbours = Triangulation(
            self.MESHX, self.MESHY,
            self.IKLE3).get_cpp_triangulation().get_neighbors()
        # ~~> build the enssemble of boundary segments
        ebounds = []
        #print '    - identify'
        #pbar = ProgressBar(maxval=self.NELEM3).start()
        #for i in range(self.NELEM3):
        #   if neighbours[i,0] < 0: ebounds.append([self.IKLE3[i][0],self.IKLE3[i][1]])
        #   if neighbours[i,1] < 0: ebounds.append([self.IKLE3[i][1],self.IKLE3[i][2]])
        #   if neighbours[i,2] < 0: ebounds.append([self.IKLE3[i][2],self.IKLE3[i][0]])
        #   pbar.update(i)
        #pbar.finish()
        # ~~> assemble the enssemble of boundary segments
        #print '    - assemble'
        #pbounds = polygons.joinSegments(ebounds)
        # ~~> define IPOBO from an arbitrary start point
        #print '    - set'
        # NOTE(review): true boundary numbering is commented out; IPOBO is
        # filled with ones as a placeholder.
        self.IPOB3 = np.ones(self.NPOIN3, dtype=np.int)
        #self.IPOB3 = np.zeros(self.NPOIN3,dtype=np.int)
        #iptfr = 0
        #for p in pbounds:
        #   for n in p[1:]:
        #      iptfr += 1
        #      self.IPOB3[n] = iptfr
        self.IPOB2 = self.IPOB3
print '... the provided cliFile does not seem to exist: ' + cliFile + '\n\n' sys.exit(1) geoFile = options.args[1] if not path.exists(cliFile): print '... the provided geoFile does not seem to exist: ' + geoFile + '\n\n' sys.exit(1) # Read the new CLI file to get boundary node numbers print ' +> getting hold of the CONLIM file and of its liquid boundaries' cli = CONLIM(cliFile) # Keeping only open boundary nodes BOR = np.extract(cli.BOR['lih'] != 2, cli.BOR['n']) # Find corresponding (x,y) in corresponding new mesh print ' +> getting hold of the GEO file and of its bathymetry' geo = SELAFIN(geoFile) xys = np.vstack((geo.MESHX[BOR - 1], geo.MESHY[BOR - 1])).T bat = geo.getVariablesAt(0, subsetVariablesSLF("BOTTOM: ", geo.VARNAMES)[0])[0] # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ slf existing res ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ slfFile = options.args[2] if not path.exists(cliFile): print '... the provided slfFile does not seem to exist: ' + slfFile + '\n\n' sys.exit(1) slf = SELAFIN(slfFile) slf.setKDTree() slf.setMPLTri()