class HYCOM():
    """Extract HYCOM GLBa0.08 ocean-model results over OPeNDAP into SELAFIN files.

    Builds a 3D prismatic mesh (and its 2D surface counterpart) over a
    lat/lon window of the HYCOM grid, then streams elevation, salinity,
    temperature and velocities into one 3D ('t3d_...') and one 2D
    ('t2d_...') SELAFIN file.  Python 2 code; relies on the project's
    SELAFIN and ProgressBar classes and pydap's open_url.
    """

    def __init__(self, dates):
        """Scan the known HYCOM experiments for records within `dates`.

        dates: pair of tuples accepted by datetime() (e.g. (y,m,d,...)),
        defining the closed period [dates[0], dates[1]].
        Fills self.experiments with tuples
        (dataset, NIT, time-indices, datetimes, url) for every experiment
        that holds at least one record in the period.
        """
        # ~~~~ Initialisation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract HYCOM time records\n'
        # Known GLBa0.08 experiments, most recent first
        hycomurls = [ \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.2', \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.1', \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_91.0', \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.9', \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.8', \
            'http://tds.hycom.org/thredds/dodsC/GLBa0.08/expt_90.6' ]
        self.experiments = []
        for hycomurl in hycomurls:
            # The OPeNDAP server is flaky: retry until the request
            # succeeds (bare except kept deliberately -- best effort).
            success = False
            while not success:
                try:
                    success = True
                    hycomdata = open_url(hycomurl)
                    NIT = hycomdata['Date'].shape[0]
                    print ' x ' + str( NIT) + ' records from ' + hycomurl,
                    ITs = []
                    ATs = []
                    z = zip(hycomdata['Date'][0:NIT], range(NIT))
                except:
                    success = False
                    print ' ... re-attempting '
            # 'Date' records are YYYYMMDD-style values parsed by slicing
            # their string form.
            for hycomdate, itime in z:
                d = datetime(int(str(hycomdate)[0:4]),
                             int(str(hycomdate)[4:6]),
                             int(str(hycomdate)[6:8]))
                if itime == 0: print ' from: ', str(d),
                if itime == NIT - 1: print ' to: ', str(d)
                if self.moddates[0] <= d and d <= self.moddates[1]:
                    ITs.append(itime)
                    ATs.append(d)
            if ITs != []:
                self.experiments.append((hycomdata, NIT, ITs, ATs, hycomurl))
        print '\n'

    def getHeaderHYCOM(self, bounds):
        """Build the SELAFIN headers (variables, mesh, connectivity, boundaries).

        bounds: [(latmin, lonmin), (latmax, lonmax)] window used to
        subset the HYCOM grid.  Populates self.slf3d / self.slf2d and
        the index arrays self.hycomilon / self.hycomilat used later by
        putContent.
        """
        # ~~> inheritence
        self.slf3d = SELAFIN('')  # slf3d
        self.slf2d = SELAFIN('')  # slf2d surface
        print ' +> Set SELAFIN Variables'
        self.slf3d.TITLE = ''
        self.slf3d.NBV1 = 6
        self.slf3d.NVAR = 6
        self.slf3d.VARINDEX = range(self.slf3d.NVAR)
        # NOTE(review): SELAFIN variable names/units are conventionally
        # 16-character padded strings; padding may have been collapsed in
        # this copy -- confirm against the format spec.
        self.slf3d.VARNAMES = ['ELEVATION Z ', \
            'SALINITY ','TEMPERATURE ', \
            'VELOCITY U ','VELOCITY V ','VELOCITY W ']
        self.slf3d.VARUNITS = ['M ', \
            'G/L ','DEGREES ', \
            'M/S ','M/S ','M/S ']
        self.slf2d.TITLE = self.slf3d.TITLE
        # 2D file: same variables minus W, plus EMP and QTOT (6+1 total)
        self.slf2d.NBV1 = self.slf3d.NBV1 + 1
        self.slf2d.NVAR = self.slf3d.NVAR + 1
        self.slf2d.VARINDEX = range(self.slf2d.NVAR)
        self.slf2d.VARNAMES = self.slf3d.VARNAMES[0:-1]
        self.slf2d.VARNAMES.append('EMP ')
        self.slf2d.VARNAMES.append('QTOT ')
        self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1]
        self.slf2d.VARUNITS.append('??? ')
        self.slf2d.VARUNITS.append('??? ')
        # ~~> server access,
        # get the grid and header from the latest experiment
        self.hycomdata = self.experiments[0][0]
        # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        success = False
        while not success:
            try:
                success = True
                # ~~> the whole of the 2D grid sizes
                print ' +> Extract HYCOM sizes'
                NX1D = self.hycomdata['X'].shape[0]
                NY1D = self.hycomdata['Y'].shape[0]
                print ' +> Extract HYCOM mesh'
                lonX1D = self.hycomdata['Longitude']['Longitude'].data[
                    0, 0:NX1D].ravel() % 360
                latY1D = self.hycomdata['Latitude']['Latitude'].data[
                    0:NY1D, 0].ravel()
            except:
                success = False
                print ' ... re-attempting '
        # ~~> lat,lon correction
        # Map longitudes from [0,360) back to (-180,180]
        for i in range(NX1D):
            if (lonX1D[i] > 180): lonX1D[i] = lonX1D[i] - 360.0
        # Rows >= 2172 get a synthetic linear latitude, 47N + 1/18 deg
        # per row -- presumably to regularise the non-rectilinear
        # northern part of the HYCOM grid; TODO confirm.
        for i in range(2172, NY1D):
            latY1D[i] = 47.0 + (i - 2172) / 18.0
        # ~~> subset for the SELAFIN
        print ' +> Set SELAFIN mesh'
        self.hycomilon = np.where(
            (lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0]
        self.hycomilat = np.where(
            (latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0]
        x = lonX1D[self.hycomilon]
        y = latY1D[self.hycomilat]
        NX1D = len(x)
        NY1D = len(y)
        # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # ~~> 3D
        success = False
        while not success:
            try:
                success = True
                print ' +> Set SELAFIN sizes'
                self.slf3d.NPLAN = self.hycomdata['Depth'].shape[0]
                # depths reversed so plan 0 is the deepest level
                self.ZPLAN = self.hycomdata['Depth'][
                    0:self.slf3d.NPLAN][::-1]  # I do not know any other way
            except:
                success = False
                print ' ... re-attempting '
        self.slf3d.NDP2 = 3
        self.slf3d.NDP3 = 6
        self.slf3d.NPOIN2 = NX1D * NY1D
        self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
        self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
        self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
        self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0]
        # ~~> 2D
        self.slf2d.NPLAN = 1
        self.slf2d.NDP2 = self.slf3d.NDP2
        self.slf2d.NDP3 = self.slf2d.NDP2
        self.slf2d.NPOIN2 = self.slf3d.NPOIN2
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        self.slf2d.NELEM2 = self.slf3d.NELEM2
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        print ' +> Set SELAFIN mesh'
        # column-major (lon varies slowest) node numbering of the subset
        self.slf3d.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel()
        self.slf3d.MESHY = np.tile(y, NX1D)
        self.slf2d.MESHX = self.slf3d.MESHX[0:self.slf2d.NPOIN2]
        self.slf2d.MESHY = self.slf3d.MESHY[0:self.slf2d.NPOIN2]
        # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Each grid cell is split into two prisms per layer.
        print ' +> Set SELAFIN IKLE'
        ielem = 0
        pbar = ProgressBar(maxval=self.slf3d.NELEM3).start()
        self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3),
                                    dtype=np.int)
        for k in range(1, self.slf3d.NPLAN):
            for i in range(1, NX1D):
                for j in range(1, NY1D):
                    ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2
                    # ~~> first prism
                    self.slf3d.IKLE3[ielem][0] = ipoin
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
                    # ~~> second prism
                    self.slf3d.IKLE3[ielem][0] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][
                        3] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
        pbar.finish()
        # 2D triangles: keep the first NDP2 columns (bottom face) of the
        # first floor of prisms.
        self.slf2d.IKLE3 = np.compress(
            np.repeat([True, False], self.slf2d.NDP2),
            self.slf3d.IKLE3[0:self.slf3d.NELEM2],
            axis=1)  #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2))
        # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Set SELAFIN IPOBO'
        pbar = ProgressBar(maxval=NX1D + NY1D).start()
        self.slf3d.IPOB3 = np.zeros(self.slf3d.NPOIN3, dtype=np.int)
        # ~~> along the x-axis (lon)
        # NOTE(review): the ipoin offsets below mix the point index with
        # the per-plan perimeter count (2*NX1D+2*NY1D-4); looks
        # questionable for NPLAN > 1 -- verify against a known-good run.
        for i in range(NX1D):
            for k in range(1, self.slf3d.NPLAN + 1):
                ipoin = i * NY1D + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
                self.slf3d.IPOB3[ipoin] = i + 1 + (k - 1) * (2 * NX1D +
                                                             2 * NY1D - 4)
                ipoin = i * NY1D - 1 + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
                self.slf3d.IPOB3[ipoin] = 2 * NX1D + (
                    NY1D - 2) - i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
            pbar.update(i)
        # ~~> along the y-axis (alt)
        for i in range(1, NY1D):
            for k in range(1, self.slf3d.NPLAN + 1):
                ipoin = i + (k - 1) * (2 * NX1D + 2 * NY1D - 4)
                self.slf3d.IPOB3[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1 + (
                    k - 1) * (2 * NX1D + 2 * NY1D - 4)
                ipoin = NY1D * (NX1D - 1) + i + (k - 1) * (2 * NX1D +
                                                           2 * NY1D - 4)
                self.slf3d.IPOB3[ipoin] = NX1D + i + (k - 1) * (2 * NX1D +
                                                                2 * NY1D - 4)
            pbar.update(i + NX1D)
        pbar.finish()
        self.slf2d.IPOB3 = self.slf3d.IPOB3[0:self.slf3d.NPOIN2]

    def putContent(self, rootName, only2D):
        """Stream all selected time records into the SELAFIN files.

        rootName: suffix for the output files 't3d_<rootName>' and
        't2d_<rootName>'.  only2D: when true, skip the 3D file entirely.
        Each remote read is retried until it succeeds; values >= 10000
        (HYCOM's land/fill flag, presumably) are zeroed -- TODO confirm.
        """
        nbar = 0
        for e in self.experiments:
            nbar += len(e[2])
        ilat = [self.hycomilat[0], self.hycomilat[-1] + 1]
        ilon = [self.hycomilon[0], self.hycomilon[-1] + 1]
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract HYCOM time records'
        self.slf3d.tags = {'times': []}
        # ~~~~ Start Date and Time ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # one record per day, in seconds
        self.slf3d.tags['times'] = 86400.0 * np.arange(nbar)
        self.slf2d.tags = {'times': self.slf3d.tags['times']}
        self.slf3d.DATETIME = self.experiments[-1][3][0].timetuple()[0:6]
        self.slf2d.DATETIME = self.slf3d.DATETIME
        self.slf3d.IPARAM[9] = 1
        self.slf2d.IPARAM[9] = 1
        #a = np.arange(40).reshape(5,8)[::-1].ravel() # 5 plans, 8 points
        print ' +> Write SELAFIN headers'
        if not only2D:
            self.slf3d.fole = {}
            self.slf3d.fole.update({'hook': open('t3d_' + rootName, 'wb')})
            self.slf3d.fole.update({'name': 't3d_' + rootName})
            self.slf3d.fole.update({'endian': ">"})  # big endian
            self.slf3d.fole.update({'float': ('f', 4)})  # single precision
            self.slf3d.appendHeaderSLF()
        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open('t2d_' + rootName, 'wb')})
        self.slf2d.fole.update({'name': 't2d_' + rootName})
        self.slf2d.fole.update({'endian': ">"})  # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision
        self.slf2d.appendHeaderSLF()
        # ~~~~ Time loop(s) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        var3d = np.zeros(self.slf3d.NPOIN3, dtype=np.float)
        var2d = np.zeros(self.slf2d.NPOIN3, dtype=np.float)
        print ' +> Write SELAFIN cores'
        ibar = 0
        # 10 progress steps per record (time stamp + 9 variables)
        pbar = ProgressBar(maxval=10 * nbar).start()
        # experiments were listed most recent first: walk them backwards
        # so records are written in chronological order
        for e in self.experiments[::-1]:
            hycomdata = e[0]
            i1 = min(e[2])
            i2 = max(e[2]) + 1
            for t in range(i1, i2):
                # ~~> time stamp
                pbar.write(' x ' + str(e[3][t - i1]), 10 * ibar + 0)
                pbar.update(10 * ibar + 0)
                if not only2D:
                    self.slf3d.appendCoreTimeSLF(ibar)
                self.slf2d.appendCoreTimeSLF(ibar)
                # ~~> HYCOM variable extraction
                #     ( 1L:times, 33L:layers, yyL:NY1D, xxL:NX1D )
                # ~~> ELEVATION
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - ssh', 10 * ibar + 1)
                        v2d = np.swapaxes(
                            hycomdata['ssh']['ssh'].data[t, ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                            0, 1).ravel()
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    # 3D elevation: -depth per plan, free surface on top
                    var3d = -np.tile(self.ZPLAN, self.slf3d.NPOIN2).reshape(
                        self.slf3d.NPOIN2, self.slf3d.NPLAN).T.ravel()
                    var3d[self.slf3d.NPOIN3 - self.slf3d.NPOIN2:] = var2d
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.update(10 * ibar + 1)
                # ~~> SALINITY
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - surface_salinity_trend', 10 * ibar + 2)
                        v2d = np.swapaxes(
                            hycomdata['surface_salinity_trend']
                            ['surface_salinity_trend'].data[
                                t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0],
                            0, 1).ravel()
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                pbar.update(10 * ibar + 2)
                if not only2D:
                    success = False
                    while not success:
                        try:
                            success = True
                            pbar.write(' - salinity', 10 * ibar + 3)
                            var = np.swapaxes(
                                hycomdata['salinity']['salinity'].data[
                                    t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1],
                                    ilon[0]:ilon[1]][0], 1, 2)
                        except:
                            success = False
                            pbar.write(
                                ' ... re-attempting because I failed ...',
                                10 * ibar)
                    # [::-1] flips layers so plan 0 is the deepest
                    v3d = var[::-1].ravel()
                    var3d = np.where(v3d < 10000, v3d, 0.0)
                    self.slf3d.appendCoreVarsSLF([var3d])
                    pbar.update(10 * ibar + 3)
                # ~~> TEMPERATURE
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - surface_temperature_trend',
                                   10 * ibar + 4)
                        v2d = np.swapaxes(
                            hycomdata['surface_temperature_trend']
                            ['surface_temperature_trend'].data[
                                t, ilat[0]:ilat[1], ilon[0]:ilon[1]][0],
                            0, 1).ravel()
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                pbar.update(10 * ibar + 4)
                if not only2D:
                    success = False
                    while not success:
                        try:
                            success = True
                            pbar.write(' - temperature', 10 * ibar + 5)
                            var = np.swapaxes(
                                hycomdata['temperature']['temperature'].data[
                                    t, 0:self.slf3d.NPLAN, ilat[0]:ilat[1],
                                    ilon[0]:ilon[1]][0], 1, 2)
                        except:
                            success = False
                            pbar.write(
                                ' ... re-attempting because I failed ...',
                                10 * ibar)
                    v3d = var[::-1].ravel()
                    var3d = np.where(v3d < 10000, v3d, 0.0)
                    self.slf3d.appendCoreVarsSLF([var3d])
                    pbar.update(10 * ibar + 5)
                # ~~> VELOCITY U
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - u-velocity', 10 * ibar + 6)
                        if only2D:
                            var = np.swapaxes(
                                hycomdata['u']['u'].data[t, 0:1,
                                                         ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                                1, 2)
                        else:
                            var = np.swapaxes(
                                hycomdata['u']['u'].data[t,
                                                         0:self.slf3d.NPLAN,
                                                         ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                                1, 2)
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                v2d = var[0].ravel()
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    v3d = var[::-1].ravel()
                    var3d = np.where(v3d < 10000, v3d, 0.0)
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.update(10 * ibar + 6)
                # ~~> VELOCITY V
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - v-velocity', 10 * ibar + 7)
                        if only2D:
                            var = np.swapaxes(
                                hycomdata['v']['v'].data[t, 0:1,
                                                         ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                                1, 2)
                        else:
                            var = np.swapaxes(
                                hycomdata['v']['v'].data[t,
                                                         0:self.slf3d.NPLAN,
                                                         ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                                1, 2)
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                v2d = var[0].ravel()
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    v3d = var[::-1].ravel()
                    var3d = np.where(v3d < 10000, v3d, 0.0)
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.update(10 * ibar + 7)
                # ~~> VELOCITY W
                # vertical velocity is not available: write a zero field
                if not only2D:
                    var3d = 0. * var3d
                    self.slf3d.appendCoreVarsSLF([var3d])
                # ~~> EMP ???
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - emp', 10 * ibar + 8)
                        v2d = np.swapaxes(
                            hycomdata['emp']['emp'].data[t, ilat[0]:ilat[1],
                                                         ilon[0]:ilon[1]][0],
                            0, 1).ravel()
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                pbar.update(10 * ibar + 8)
                # ~~> QTOT
                success = False
                while not success:
                    try:
                        success = True
                        pbar.write(' - qtot', 10 * ibar + 9)
                        v2d = np.swapaxes(
                            hycomdata['qtot']['qtot'].data[t,
                                                           ilat[0]:ilat[1],
                                                           ilon[0]:ilon[1]][0],
                            0, 1).ravel()
                    except:
                        success = False
                        pbar.write(
                            ' ... re-attempting because I failed ...',
                            10 * ibar)
                var2d = np.where(v2d < 10000, v2d, 0.0)
                self.slf2d.appendCoreVarsSLF([var2d])
                pbar.update(10 * ibar + 9)
                ibar += 1
        pbar.finish()
        if not only2D:
            self.slf3d.fole['hook'].close()
        self.slf2d.fole['hook'].close()

    def __del__(self):
        # nothing to release; files are closed explicitly in putContent
        pass
for t in range(len(slf.tags['times'])): data = getValueHistorySLF(slf.file, slf.tags, [t], support3d, slf.NVAR, slf.NPOIN3, slf.NPLAN, vars) # special case for TEMPERATURE and SALINITY data[3] = np.maximum(data[3], zeros) data[4] = np.maximum(data[4], zeros) d = np.reshape( np.transpose( np.reshape(np.ravel(data), (bnd.NVAR, bnd.NPOIN2, bnd.NPLAN)), (0, 2, 1)), (bnd.NVAR, bnd.NPOIN3)) #for ipoin in range(bnd.NPOIN2): # for iplan in range(bnd.NPLAN-1,0,-1): # for ivar in range(bnd.NVAR)[1:]: # except for Z # if bat[BOR[ipoin]-1] > d[0][ipoin+(iplan-1)*bnd.NPOIN2]: # d[ivar][ipoin+(iplan-1)*bnd.NPOIN2] = d[ivar][ipoin+iplan*bnd.NPOIN2] # if d[3][ipoin+(iplan-1)*bnd.NPOIN2] < 28.0: # d[3][ipoin+(iplan-1)*bnd.NPOIN2] = max(d[3][ipoin+iplan*bnd.NPOIN2],28.0) bnd.appendCoreTimeSLF(t) bnd.appendCoreVarsSLF(d) pbar.update(t) pbar.finish() # Close bndFile bnd.fole['hook'].close() # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print '\n\nMy work is done\n\n' sys.exit(0)
class JCOPE2():
    """Extract FRA-JCOPE2 ocean-model results over OPeNDAP into SELAFIN files.

    Like HYCOM above, but for the JCOPE2 dataset served by APDRC.  The
    rectangular grid is additionally masked against the 'el' field so
    that land elements (value -99) are removed and the mesh renumbered.
    Python 2 code; relies on the project's SELAFIN and ProgressBar
    classes and pydap's open_url.
    """

    def __init__(self, dates):
        """Collect the JCOPE2 datasets holding records within `dates`.

        dates: pair of tuples accepted by datetime(), defining the closed
        period [dates[0], dates[1]].  Fills self.experiments with a
        single tuple ({var: dataset}, NIT, time-indices, datetimes);
        exits the program if no record falls in the period.
        """
        # ~~~~ Initialisation ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
        jcope2vars = ['el', 't', 's', 'u', 'v']
        # /!\ unknown convertion of time records into dates
        jcope2date = [1993, 1, 1]
        jcope2root = 'http://apdrc.soest.hawaii.edu/dods/public_data/FRA-JCOPE2'
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract JCOPE2 time records\n'
        self.experiments = []
        experiment = {}
        # /!\ only one time period covered at this stage
        for jvar in jcope2vars:
            jcope2url = jcope2root + '/' + jvar
            jcope2data = open_url(jcope2url)
            NIT = jcope2data['time'].shape[0]
            print ' x ' + str(NIT) + ' records from ' + jcope2url,
            ITs = []
            ATs = []
            # time index = whole days since the reference date
            for itime in range(NIT):
                d = datetime(jcope2date[0], jcope2date[1],
                             jcope2date[2]) + timedelta(itime)
                if itime == 0: print ' from: ', str(d),
                if itime == NIT - 1: print ' to: ', str(d)
                if self.moddates[0] <= d and d <= self.moddates[1]:
                    ITs.append(itime)
                    ATs.append(d)
            if ITs != []:
                # keep every data variable (skip the coordinate axes)
                for ivar in jcope2data.keys():
                    if ivar not in ['time', 'lev', 'lat', 'lon']:
                        experiment.update({ivar: jcope2data[ivar]})
            else:
                print '... I could not find the time to do your work'
                print ' ~> you may need to select a different time period'
                sys.exit(1)
        self.experiments.append((experiment, NIT, ITs, ATs))
        print '\n'

    def getHeaderJCOPE2(self, bounds):
        """Build the SELAFIN headers for the window bounds =
        [(latmin, lonmin), (latmax, lonmax)], masking out land elements
        (-99 in 'el') and renumbering the mesh accordingly."""
        # ~~> inheritence
        self.slf3d = SELAFIN('')  # slf3d
        self.slf2d = SELAFIN('')  # slf2d surface
        print ' +> Set SELAFIN Variables'
        self.slf3d.TITLE = ''
        self.slf3d.NBV1 = 6
        self.slf3d.NVAR = 6
        self.slf3d.VARINDEX = range(self.slf3d.NVAR)
        # NOTE(review): SELAFIN names/units are conventionally 16-char
        # padded; padding may have been collapsed in this copy.
        self.slf3d.VARNAMES = ['ELEVATION Z ', \
            'SALINITY ','TEMPERATURE ', \
            'VELOCITY U ','VELOCITY V ','VELOCITY W ']
        self.slf3d.VARUNITS = ['M ', \
            ' ',' ', \
            'M/S ','M/S ','M/S ']
        self.slf2d.TITLE = self.slf3d.TITLE
        # 2D file: same variables minus W (5 total)
        self.slf2d.NBV1 = self.slf3d.NBV1 - 1
        self.slf2d.NVAR = self.slf2d.NBV1
        self.slf2d.VARINDEX = range(self.slf2d.NVAR)
        self.slf2d.VARNAMES = self.slf3d.VARNAMES[0:-1]
        self.slf2d.VARUNITS = self.slf3d.VARUNITS[0:-1]
        # ~~~~ Grid coordinates ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # ~~> the whole of the 2D grid sizes
        print ' +> Extract JCOPE2 sizes'
        # /!\ 't' gives me access to NPLAN in 3D
        jcope2data = self.experiments[0][0]['temp']
        NX1D = jcope2data['lon'].shape[0]
        NY1D = jcope2data['lat'].shape[0]
        print ' +> Extract JCOPE2 mesh'
        lonX1D = jcope2data['lon'].data[0:NX1D]
        latY1D = jcope2data['lat'].data[0:NY1D]
        # ~~> no correction for lat,lon
        # ~~> subset for the SELAFIN
        print ' +> Set SELAFIN mesh'
        self.jcope2ilon = np.where(
            (lonX1D >= bounds[0][1]) * (lonX1D <= bounds[1][1]))[0]
        self.jcope2ilat = np.where(
            (latY1D >= bounds[0][0]) * (latY1D <= bounds[1][0]))[0]
        x = lonX1D[self.jcope2ilon]
        y = latY1D[self.jcope2ilat]
        NX1D = len(x)
        NY1D = len(y)
        # ~~~~ MESH sizes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Set SELAFIN sizes'
        # ~~> 3D
        self.slf3d.NPLAN = jcope2data['lev'].shape[0]
        # levels reversed so plan 0 is the deepest
        self.ZPLAN = jcope2data['lev'][
            0:self.slf3d.NPLAN][::-1]  # I do not know any other way
        self.slf3d.NDP2 = 3
        self.slf3d.NDP3 = 6
        self.slf3d.NPOIN2 = NX1D * NY1D
        self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
        self.slf3d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
        self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
        self.slf3d.IPARAM = [0, 0, 0, 0, 0, 0, self.slf3d.NPLAN, 0, 0, 0]
        # ~~> 2D
        self.slf2d.NPLAN = 1
        self.slf2d.NDP2 = self.slf3d.NDP2
        self.slf2d.NDP3 = self.slf2d.NDP2
        self.slf2d.NPOIN2 = self.slf3d.NPOIN2
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        self.slf2d.NELEM2 = self.slf3d.NELEM2
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # Default full-rectangle connectivity: two prisms per cell/layer
        print ' +> Set the default SELAFIN IKLE 3D'
        ielem = 0
        pbar = ProgressBar(maxval=self.slf3d.NELEM3).start()
        self.slf3d.IKLE3 = np.zeros((self.slf3d.NELEM3, self.slf3d.NDP3),
                                    dtype=np.int)
        for k in range(1, self.slf3d.NPLAN):
            for i in range(1, NX1D):
                for j in range(1, NY1D):
                    ipoin = (i - 1) * NY1D + j - 1 + (k - 1) * self.slf3d.NPOIN2
                    # ~~> first prism
                    self.slf3d.IKLE3[ielem][0] = ipoin
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][3] = ipoin + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
                    # ~~> second prism
                    self.slf3d.IKLE3[ielem][0] = ipoin + NY1D
                    self.slf3d.IKLE3[ielem][1] = ipoin + NY1D + 1
                    self.slf3d.IKLE3[ielem][2] = ipoin + 1
                    self.slf3d.IKLE3[ielem][
                        3] = ipoin + NY1D + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][
                        4] = ipoin + NY1D + 1 + self.slf3d.NPOIN2
                    self.slf3d.IKLE3[ielem][5] = ipoin + 1 + self.slf3d.NPOIN2
                    ielem = ielem + 1
                    pbar.update(ielem)
        pbar.finish()
        # 2D triangles: bottom face of the first floor of prisms
        self.slf2d.IKLE3 = np.compress(
            [True, True, True, False, False, False],
            self.slf3d.IKLE3[0:self.slf3d.NELEM2],
            axis=1)  #.reshape((self.slf3d.NELEM2,self.slf3d.NDP2))
        # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Set SELAFIN IPOBO'
        pbar = ProgressBar(maxval=NX1D + NY1D).start()
        IPOB2 = np.zeros(self.slf3d.NPOIN2, dtype=np.int)
        # ~~> along the x-axis (lon)
        for i in range(NX1D):
            ipoin = i * NY1D
            IPOB2[ipoin] = i + 1
            # NOTE(review): for i == 0 this writes index -1 (last point)
            ipoin = i * NY1D - 1
            IPOB2[ipoin] = 2 * NX1D + (NY1D - 2) - i
            pbar.update(i)
        # ~~> along the y-axis (alt)
        for i in range(1, NY1D):
            ipoin = i
            IPOB2[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1
            ipoin = NY1D * (NX1D - 1) + i
            IPOB2[ipoin] = NX1D + i
            pbar.update(i + NX1D)
        pbar.finish()
        # ~~~~ Connectivity ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # /!\ 'el' gives me access to the real mesh removing elements
        #     with -99 values
        print ' +> Mask the non-values from the SELAFIN IKLE'
        jcope2data = self.experiments[0][0]['el']
        var = np.swapaxes(
            jcope2data['el'].data[0, 0,
                                  self.jcope2ilat[0]:self.jcope2ilat[-1] + 1,
                                  self.jcope2ilon[0]:self.jcope2ilon[-1] + 1]
            [0], 1, 2).ravel()
        # ~> the elements you wish to keep
        # keep only triangles whose 3 nodes all carry valid (> -99) data
        MASK2 = self.slf2d.IKLE3[np.where(
            np.sum(np.in1d(
                self.slf2d.IKLE3,
                np.compress(var > -99, np.arange(len(
                    var)))).reshape(self.slf2d.NELEM2, self.slf2d.NDP2),
                   axis=1) == 3)]
        self.slf2d.NELEM2 = len(MASK2)
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf3d.NELEM2 = self.slf2d.NELEM2
        self.slf3d.NELEM3 = self.slf3d.NELEM2 * (self.slf3d.NPLAN - 1)
        # ~~> re-numbering IKLE2 as a local connectivity matrix
        # KNOLG: kept global node numbers; KNOGL: global -> local map
        KNOLG, indices = np.unique(np.ravel(MASK2), return_index=True)
        KNOGL = dict(zip(KNOLG, range(len(KNOLG))))
        # boolean masks over the original full grid (2D and stacked 3D)
        self.MASK2 = np.in1d(np.arange(len(var)), KNOLG)
        self.MASK3 = np.tile(self.MASK2, self.slf3d.NPLAN)
        self.slf2d.IKLE2 = -np.ones_like(MASK2, dtype=np.int)
        for k in range(len(MASK2)):
            self.slf2d.IKLE2[k] = [
                KNOGL[MASK2[k][0]], KNOGL[MASK2[k][1]], KNOGL[MASK2[k][2]]
            ]
        self.slf3d.NPOIN2 = len(KNOLG)
        self.slf3d.NPOIN3 = self.slf3d.NPOIN2 * self.slf3d.NPLAN
        self.slf2d.NPOIN2 = self.slf3d.NPOIN2
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        # ~~> re-connecting the upper floors
        self.slf2d.IKLE3 = self.slf2d.IKLE2
        self.slf3d.IKLE2 = self.slf2d.IKLE2
        # stack the 2D connectivity NPLAN-1 times, shifting each floor by
        # NPOIN2 (bottom face) and 2*NPOIN2 (top face)
        self.slf3d.IKLE3 = \
            np.repeat(self.slf2d.NPOIN2*np.arange(self.slf3d.NPLAN-1),self.slf2d.NELEM2*self.slf3d.NDP3).reshape((self.slf2d.NELEM2*(self.slf3d.NPLAN-1),self.slf3d.NDP3)) + \
            np.tile(np.add(np.tile(self.slf2d.IKLE2,2),np.repeat(self.slf2d.NPOIN2*np.arange(2),self.slf3d.NDP2)),(self.slf3d.NPLAN-1,1))
        # ~~~~ Boundaries ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        self.slf2d.IPOB2 = IPOB2[self.MASK2]
        self.slf2d.IPOB3 = self.slf2d.IPOB2
        self.slf3d.IPOB2 = self.slf2d.IPOB2
        self.slf3d.IPOB3 = np.ravel(
            np.add(
                np.repeat(self.slf2d.IPOB2, self.slf3d.NPLAN).reshape(
                    (self.slf2d.NPOIN2, self.slf3d.NPLAN)),
                self.slf2d.NPOIN2 * np.arange(self.slf3d.NPLAN)).T)
        # ~~~~ Mesh ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Set SELAFIN mesh'
        # +0.042 offset: presumably a half-cell shift of the node
        # coordinates -- TODO confirm the intent
        self.slf3d.MESHX = np.tile(x, NY1D).reshape(
            NY1D, NX1D).T.ravel()[self.MASK2] + 0.042
        self.slf3d.MESHY = np.tile(y, NX1D)[self.MASK2] + 0.042
        self.slf2d.MESHX = self.slf3d.MESHX
        self.slf2d.MESHY = self.slf3d.MESHY

    def putContent(self, rootName, only2D):
        """Stream all selected time records into 't3d_<rootName>' /
        't2d_<rootName>'.  only2D skips the 3D file.  3D fields have
        -99 (invalid) values filled downward from the plan above."""
        nbar = 0
        for e in self.experiments:
            nbar += len(e[2])
        ilat = [self.jcope2ilat[0], self.jcope2ilat[-1] + 1]
        ilon = [self.jcope2ilon[0], self.jcope2ilon[-1] + 1]
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        print ' +> Extract JCOPE2 time records'
        self.slf3d.tags = {'times': []}
        # ~~~~ Start Date and Time ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        # one record per day, in seconds
        self.slf3d.tags['times'] = 86400.0 * np.arange(nbar)
        self.slf2d.tags = {'times': self.slf3d.tags['times']}
        self.slf3d.DATETIME = self.experiments[-1][3][0].timetuple()[0:6]
        self.slf2d.DATETIME = self.slf3d.DATETIME
        self.slf3d.IPARAM[9] = 1
        self.slf2d.IPARAM[9] = 1
        #a = np.arange(40).reshape(5,8)[::-1].ravel() # 5 plans, 8 points
        print ' +> Write SELAFIN headers'
        if not only2D:
            self.slf3d.fole = {}
            self.slf3d.fole.update({'hook': open('t3d_' + rootName, 'wb')})
            self.slf3d.fole.update({'name': 't3d_' + rootName})
            self.slf3d.fole.update({'endian': ">"})  # big endian
            self.slf3d.fole.update({'float': ('f', 4)})  # single precision
            self.slf3d.appendHeaderSLF()
        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open('t2d_' + rootName, 'wb')})
        self.slf2d.fole.update({'name': 't2d_' + rootName})
        self.slf2d.fole.update({'endian': ">"})  # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision
        self.slf2d.appendHeaderSLF()
        # ~~~~ Time loop(s) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        #var3d = np.zeros(self.slf3d.NPOIN3,dtype=np.float)
        #var2d = np.zeros(self.slf2d.NPOIN3,dtype=np.float)
        print ' +> Write SELAFIN cores'
        ibar = 0
        # 6 progress steps per record (time stamp + 5 variables)
        pbar = ProgressBar(maxval=6 * nbar).start()
        for e in self.experiments:
            jcope2data = e[0]
            i1 = min(e[2])
            i2 = max(e[2]) + 1
            for t in range(i1, i2):
                # ~~> time stamp
                pbar.write(' x ' + str(e[3][t - i1]), 6 * ibar + 0)
                pbar.update(6 * ibar + 0)
                if not only2D:
                    self.slf3d.appendCoreTimeSLF(ibar)
                self.slf2d.appendCoreTimeSLF(ibar)
                # ~~> ELEVATION
                var2d = np.swapaxes(
                    jcope2data['el']['el'].data[t, 0, ilat[0]:ilat[1],
                                                ilon[0]:ilon[1]][0], 1,
                    2).ravel()[self.MASK2]
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    # 3D elevation: -level per plan, free surface on top
                    var3d = -np.tile(self.ZPLAN, self.slf3d.NPOIN2).reshape(
                        self.slf3d.NPOIN2, self.slf3d.NPLAN).T.ravel()
                    var3d[self.slf3d.NPOIN3 - self.slf3d.NPOIN2:] = var2d
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.write(' - elevation', 6 * ibar + 1)
                pbar.update(6 * ibar + 1)
                # ~~> SALINITY
                if only2D:
                    var = np.swapaxes(
                        jcope2data['salt']['salt'].data[t, 0:1,
                                                        ilat[0]:ilat[1],
                                                        ilon[0]:ilon[1]][0],
                        1, 2)
                else:
                    var = np.swapaxes(
                        jcope2data['salt']['salt'].data[t,
                                                        0:self.slf3d.NPLAN,
                                                        ilat[0]:ilat[1],
                                                        ilon[0]:ilon[1]][0],
                        1, 2)
                var2d = var[0].ravel()[self.MASK2]
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    var3d = var[::-1].ravel()[self.MASK3]
                    # fill invalid (-99) values downward from the plan above
                    for ipoin in range(self.slf3d.NPOIN2):
                        for iplan in range(self.slf3d.NPLAN - 1, 0, -1):
                            if var3d[ipoin +
                                     (iplan - 1) * self.slf3d.NPOIN2] < -99.0:
                                var3d[ipoin + (
                                    iplan - 1) * self.slf3d.NPOIN2] = var3d[
                                        ipoin + iplan * self.slf3d.NPOIN2]
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.write(' - salinity', 6 * ibar + 2)
                pbar.update(6 * ibar + 2)
                # ~~> TEMPERATURE
                if only2D:
                    var = np.swapaxes(
                        jcope2data['temp']['temp'].data[t, 0:1,
                                                        ilat[0]:ilat[1],
                                                        ilon[0]:ilon[1]][0],
                        1, 2)
                else:
                    var = np.swapaxes(
                        jcope2data['temp']['temp'].data[t,
                                                        0:self.slf3d.NPLAN,
                                                        ilat[0]:ilat[1],
                                                        ilon[0]:ilon[1]][0],
                        1, 2)
                var2d = var[0].ravel()[self.MASK2]
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    var3d = var[::-1].ravel()[self.MASK3]
                    for ipoin in range(self.slf3d.NPOIN2):
                        for iplan in range(self.slf3d.NPLAN - 1, 0, -1):
                            if var3d[ipoin +
                                     (iplan - 1) * self.slf3d.NPOIN2] < -99.0:
                                var3d[ipoin + (
                                    iplan - 1) * self.slf3d.NPOIN2] = var3d[
                                        ipoin + iplan * self.slf3d.NPOIN2]
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.write(' - temperature', 6 * ibar + 3)
                pbar.update(6 * ibar + 3)
                # ~~> VELOCITY U
                if only2D:
                    var = np.swapaxes(
                        jcope2data['u']['u'].data[t, 0:1, ilat[0]:ilat[1],
                                                  ilon[0]:ilon[1]][0], 1, 2)
                else:
                    var = np.swapaxes(
                        jcope2data['u']['u'].data[t, 0:self.slf3d.NPLAN,
                                                  ilat[0]:ilat[1],
                                                  ilon[0]:ilon[1]][0], 1, 2)
                var2d = var[0].ravel()[self.MASK2]
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    var3d = var[::-1].ravel()[self.MASK3]
                    for ipoin in range(self.slf3d.NPOIN2):
                        for iplan in range(self.slf3d.NPLAN - 1, 0, -1):
                            if var3d[ipoin +
                                     (iplan - 1) * self.slf3d.NPOIN2] < -99.0:
                                var3d[ipoin + (
                                    iplan - 1) * self.slf3d.NPOIN2] = var3d[
                                        ipoin + iplan * self.slf3d.NPOIN2]
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.write(' - u-velocity', 6 * ibar + 4)
                pbar.update(6 * ibar + 4)
                # ~~> VELOCITY V
                if only2D:
                    var = np.swapaxes(
                        jcope2data['v']['v'].data[t, 0:1, ilat[0]:ilat[1],
                                                  ilon[0]:ilon[1]][0], 1, 2)
                else:
                    var = np.swapaxes(
                        jcope2data['v']['v'].data[t, 0:self.slf3d.NPLAN,
                                                  ilat[0]:ilat[1],
                                                  ilon[0]:ilon[1]][0], 1, 2)
                var2d = var[0].ravel()[self.MASK2]
                self.slf2d.appendCoreVarsSLF([var2d])
                if not only2D:
                    var3d = var[::-1].ravel()[self.MASK3]
                    for ipoin in range(self.slf3d.NPOIN2):
                        for iplan in range(self.slf3d.NPLAN - 1, 0, -1):
                            if var3d[ipoin +
                                     (iplan - 1) * self.slf3d.NPOIN2] < -99.0:
                                var3d[ipoin + (
                                    iplan - 1) * self.slf3d.NPOIN2] = var3d[
                                        ipoin + iplan * self.slf3d.NPOIN2]
                    self.slf3d.appendCoreVarsSLF([var3d])
                pbar.write(' - v-velocity', 6 * ibar + 5)
                pbar.update(6 * ibar + 5)
                # ~~> VELOCITY W
                # vertical velocity is not available: write a zero field
                if not only2D:
                    var3d = 0. * var3d
                    self.slf3d.appendCoreVarsSLF([var3d])
                ibar += 1
        pbar.finish()
        if not only2D:
            self.slf3d.fole['hook'].close()
        self.slf2d.fole['hook'].close()

    def __del__(self):
        # nothing to release; files are closed explicitly in putContent
        pass
class splitSELAFIN():
    """Split a global SELAFIN mesh (and optionally its CONLIM boundary file)
    into NPARTS sub-domain files, following an element partition that comes
    either from a METIS sequence file or from a 'PROCESSORS' node variable.

    NOTE(review): this block was recovered from whitespace-mangled source;
    the statement nesting was reconstructed from syntax and should be
    cross-checked against the original TELEMAC scripts.
    """

    def __init__(self,SLFfileName,CLMfileName,SEQfileName='',splitCONLIM=False,DOMfileRoot=''):
        # Loads the global CONLIM and SELAFIN files, derives the element/node
        # partition, cleans it up, and prepares a template SELAFIN for output.
        print '\n... Acquiring global files'
        # ~~> Acquire global CONLIM file
        print ' +> CONLIM file'
        self.clm = CONLIM(CLMfileName)
        self.isCONLIM = splitCONLIM
        # ~~> Acquire global SELAFIN file
        print ' +> SELAFIN file'
        self.slf = SELAFIN(SLFfileName)
        # ~~> Acquire global SELAFIN file
        if SEQfileName != '':
            print ' +> SEQUENCE file'
            # partition given per element in an external (METIS) sequence file
            self.NPARTS,self.NSPLIT,self.KSPLIT = self.getSplitFromSequence(
                np.array( getFileContent(SEQfileName), dtype='<i4' ))
        else:
            # partition given per node through the 'PROCESSORS' variable
            self.NPARTS,self.NSPLIT,self.KSPLIT = self.getSplitFromNodeValues('PROCESSORS')
        print '\n... Split by elements in ',self.NPARTS,' parts\n'
        # ~~> Clean inconsistencies in boundary segments
        self.IPOBO,self.NSPLIT,self.KSPLIT = self.setSplitForBoundaries(
            self.NSPLIT,self.clm.KFRGL,self.KSPLIT)
        self.PINTER,self.PNHALO,self.PNODDS = \
            self.setSplitForElements( self.IPOBO,self.NPARTS,self.NSPLIT,self.KSPLIT )
        self.slfn = self.copyCommonData()
        # ~~> Optional output file names
        self.isDOMAIN = DOMfileRoot

    # Make a copy of common information for sub-meshes
    def copyCommonData(self):
        """Return a new SELAFIN object carrying the metadata shared by all
        sub-domain files (titles, variables, time tags, vertical structure)."""
        SLFn = SELAFIN('')
        # Meta data
        SLFn.TITLE = self.slf.TITLE
        SLFn.file = self.slf.file
        SLFn.IPARAM = self.slf.IPARAM
        # Time
        SLFn.DATETIME = self.slf.DATETIME
        SLFn.tags = self.slf.tags
        # Variables
        SLFn.NBV1 = self.slf.NBV1
        SLFn.VARNAMES = self.slf.VARNAMES
        SLFn.VARUNITS = self.slf.VARUNITS
        SLFn.NBV2 = self.slf.NBV2
        SLFn.CLDNAMES = self.slf.CLDNAMES
        SLFn.CLDUNITS = self.slf.CLDUNITS
        SLFn.NVAR = self.slf.NVAR
        SLFn.VARINDEX = range(self.slf.NVAR)
        # Unchanged numbers
        SLFn.NPLAN = self.slf.NPLAN
        SLFn.NDP2 = self.slf.NDP2
        SLFn.NDP3 = self.slf.NDP3
        return SLFn

    # Split based on a sequence of parts, one for each element (result from METIS)
    def getSplitFromSequence(self,KSPLIT):
        """Derive (NPARTS, node-split, element-split) from a per-element
        partition sequence. Returned splits are shifted to 0-based numbering."""
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*KSPLIT)
        NSPLIT = np.zeros( self.slf.NPOIN2 ,dtype=np.int )
        for part in range(NPARTS):
            # nodes inherit the part number of the elements using them
            k = np.compress(KSPLIT==(part+1),range(len(self.slf.IKLE)))
            NSPLIT[self.slf.IKLE[k]] = KSPLIT[k]
        return NPARTS,NSPLIT-1,KSPLIT-1

    # Split based on the variable PROCESSORS, defined at the nodes
    def getSplitFromNodeValues(self,var):
        """Derive (NPARTS, node-split, element-split) from a node variable;
        each element takes the smallest part number of its three nodes."""
        # ~~> Filter for 'PROCESSORS' as input to the getVariablesAt method
        i,vn = subsetVariablesSLF(var,self.slf.VARNAMES)
        if i == []:
            print '... Could not find ',var,', you may need another split method'
            sys.exit(1)
        # ~~> NSPLIT is the interger value of the variable PROCESSORS (time frame 0)
        NSPLIT = np.array( self.slf.getVariablesAt( 0,i )[0], dtype=np.int)
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*NSPLIT) + 1       # User numbering NSPLIT starts from 0
        KSPLIT = np.minimum(*(NSPLIT[self.slf.IKLE].T))
        return NPARTS,NSPLIT,KSPLIT

    def setSplitForBoundaries(self,NSPLIT,KFRGL,KSPLIT):
        """Iteratively repair the partition so no element straddles a part
        boundary inconsistently; returns (IPOBO, NSPLIT, KSPLIT) corrected."""
        # ~~> Join up the global boundary nodes with the halo elements
        IPOBO = np.zeros(self.slf.NPOIN2,dtype=np.int)
        IPOBO[KFRGL.keys()] = np.array(KFRGL.values(),dtype=np.int)+1 # this is so the nonzero search is easier
        # ~~> Cross check partition quality -- step 1
        found = True; nloop = 0
        while found:
            found = False; nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if KSPLIT[k] != max( NSPLIT[e] ):
                    for p1,p2,p3 in zip([0,1,2],[1,2,0],[2,0,1]):
                        if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                            if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0:
                                # a boundary edge disagrees with its element's part:
                                # re-assign both nodes and the element to the third node's part
                                print ' ~> correcting boundary segment at iteration: ',nloop,(e[p1],e[p2]),k,KSPLIT[k],e,NSPLIT[e]
                                NSPLIT[e[p1]] = NSPLIT[e[p3]]
                                NSPLIT[e[p2]] = NSPLIT[e[p3]]
                                KSPLIT[k] = NSPLIT[e[p3]]
                                found = True
        # ~~> Cross check partition quality -- step 2
        found = True; nloop = 0
        while found:
            found = False; nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                # internal elements always take the smallest part of their nodes
                if min( NSPLIT[e] ) != max( NSPLIT[e] ) and KSPLIT[k] != min( NSPLIT[e] ):
                    print ' ~> correcting internal segment at iteration: ',nloop,k,KSPLIT[k],e,NSPLIT[e]
                    KSPLIT[k] = min( NSPLIT[e] )
                    found = True
        return IPOBO,NSPLIT,KSPLIT

    # Split based on the variable PROCESSORS, defined at the nodes
    def setSplitForElements(self,IPOBO,NPARTS,NSPLIT,KSPLIT):
        """Collect, per part, the edge segments of three kinds:
        SINTER (internal part-to-part), SNHALO (external, consecutively
        numbered) and PNODDS (external, odd numbering)."""
        SNHALO = dict([ (i,[]) for i in range(NPARTS) ])
        PNODDS = dict([ (i,[]) for i in range(NPARTS) ])
        SINTER = dict([ (i,[]) for i in range(NPARTS) ])
        # ~~> Internal segments separating parts
        pbar = ProgressBar(maxval=len(self.slf.IKLE)).start()
        for k in range(len(self.slf.IKLE)):
            e = self.slf.IKLE[k]
            # Case 1: you are at an internal boundary element
            if KSPLIT[k] != max( NSPLIT[e] ):
                for p1,p2 in zip([0,1,2],[1,2,0]):
                    if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                        # record the edge once for each side of the interface
                        SINTER[KSPLIT[k]].append((e[p1],e[p2]))
                        SINTER[min(NSPLIT[e[p1]],NSPLIT[e[p2]])].append((e[p2],e[p1]))
            # Case 2: you may be at an external boundary element
            if np.count_nonzero( IPOBO[e] ) > 1:
                for p1,p2 in zip([0,1,2],[1,2,0]):
                    if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0: # multiplier is not possible
                        if IPOBO[e[p1]] + 1 == IPOBO[e[p2]]:
                            SNHALO[KSPLIT[k]].append((e[p1],e[p2]))
                        else:
                            PNODDS[KSPLIT[k]].append([e[p1],e[p2]])
            pbar.update(k)
        pbar.finish()
        # ~~> Clean-up of funny segments looping on themselves
        for part in range(NPARTS):
            # ~~> Quickly checking through to remove duplicate segments
            found = True
            while found:
                found = False
                INTER = np.array( SINTER[part], dtype=[ ('h',int),('t',int) ] )
                HEADT = np.argsort( INTER['h'] )
                HLINK = np.searchsorted(INTER['h'][HEADT],INTER['t'][HEADT])
                w = 0
                while w < len(HLINK):
                    if HLINK[w] < len(HLINK):
                        # a segment and its exact reverse both present => drop the pair
                        if INTER['h'][HEADT[w]] == INTER['t'][HEADT[HLINK[w]]] and INTER['t'][HEADT[w]] == INTER['h'][HEADT[HLINK[w]]]:
                            print ' ~> Removing dupicate segments in part: ',part,SINTER[part][HEADT[w]],SINTER[part][HEADT[HLINK[w]]]
                            if HEADT[w] > HEADT[HLINK[w]]:
                                SINTER[part].pop(HEADT[w])
                                SINTER[part].pop(HEADT[HLINK[w]])
                            else:
                                SINTER[part].pop(HEADT[HLINK[w]])
                                SINTER[part].pop(HEADT[w])
                            found = True
                            break
                    w += 1
        return SINTER,SNHALO,PNODDS

    def getIKLE(self,npart):
        """Return (LIKLE, KELLG, KNOLG) for one part: local connectivity with
        local node numbers, local-to-global element map, local-to-global node map."""
        # ~~> get IKLE for that part ... still with global element numbers
        GIKLE = np.compress( self.KSPLIT==npart,self.slf.IKLE,axis=0 )
        KELLG = np.compress( self.KSPLIT==npart,range(len(self.slf.IKLE)),axis=0 )
        # ~~> KNOLG(NPOIN3) gives the global node number such that
        #     for i = 1,NPOIN3: Fwrite(i) = Fread(KNOLG(i)) and is ordered
        KNOLG,indices = np.unique( np.ravel(GIKLE), return_index=True )
        KNOGL = dict(zip( KNOLG,range(len(KNOLG)) ))
        LIKLE = - np.ones_like(GIKLE,dtype=np.int)
        pbar = ProgressBar(maxval=len(GIKLE)).start()
        for k in range(len(GIKLE)):
            LIKLE[k] = [ KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]] ]
            pbar.update(k)
        pbar.finish()
        return LIKLE,KELLG,KNOLG

    def resetPartition(self,part,PINTER,KSPLIT):
        """Sanity-check elements along this part's interface lines; only
        warns (the corrective assignment is commented out). Returns KSPLIT."""
        MASKER = np.zeros(self.slf.NPOIN2,dtype=np.int)
        for p in PINTER: MASKER[p] = np.arange(len(p))+1 # PINTER is ordered
        KIKLE = np.compress(np.maximum(*(MASKER[self.slf.IKLE].T))>=0,range(len(self.slf.IKLE)))
        #KIKLE = np.compress(np.count_nonzero(MASKER[self.slf.IKLE],axis=1)>2,range(len(self.slf.IKLE))) # /!\ does not work ?
        pbar = ProgressBar(maxval=len(KIKLE)).start()
        for k in KIKLE:
            e = self.slf.IKLE[k]
            if np.count_nonzero( MASKER[e] ) < 2 or KSPLIT[k] == part: continue
            for p1,p2 in zip([0,1,2],[1,2,0]):
                # edge runs along the interface in increasing interface order
                if MASKER[e[p1]] > 0 and MASKER[e[p2]] > 0 and MASKER[e[p2]] > MASKER[e[p1]]:
                    print ' ~> Warning for element of part: ',part,'(was:',KSPLIT[k],') ',k,e
                    #KSPLIT[k] = part
            pbar.update(k)
        pbar.finish()
        return KSPLIT

    def joinPairs(self,polyLines):
        """Chain a list of (head, tail) edge pairs into poly-lines.
        Returns a list of node-number lists (closed loops keep their
        duplicated end point)."""
        INTER = np.array( polyLines, dtype=[ ('h',int),('t',int) ] )
        IDONE = np.ones( len(polyLines),dtype=np.int )  # 1 = pair not yet consumed
        polyA = []; polyZ = []; polyL = []  # starts / open ends / loop ends
        # ~~> Finding the endings
        HEADT = np.argsort( INTER['h'] ) # knowing that INTER[HEADT] is sorted by the head
        HLINK = np.searchsorted(INTER['h'][HEADT],INTER['t'][HEADT]) # INTER['h'][HEADT] is sorted
        # ... HLINK[w] for w in INTER['t'] gives you the position of INTER['t'][w] in INTER['h'][HEADT]
        w = min(np.compress(np.not_equal(IDONE,IDONE*0),range(len(HEADT))))
        po = INTER['h'][HEADT[w]]; pe = INTER['t'][HEADT[w]]; IDONE[w] = 0
        polyA.append(po)
        swapMinMax = True
        while True:
            # walk the chain head->tail as long as the next pair links up
            if HLINK[w] < len(INTER):
                if INTER['t'][HEADT][w] == INTER['h'][HEADT][HLINK[w]]:
                    w = HLINK[w]
                    pe = INTER['t'][HEADT][w]; IDONE[w] = 0
                    if pe not in polyA:
                        if HLINK[w] < len(INTER):
                            if INTER['t'][HEADT][w] != po and INTER['t'][HEADT][w] == INTER['h'][HEADT][HLINK[w]]:
                                continue
            if po == pe:
                polyL.append(pe)        # chain closed on itself
            else:
                if pe not in polyZ:
                    polyZ.append(pe)    # genuine open end
                else:
                    polyA.append(po)
            if np.count_nonzero(IDONE) == 0: break
            # alternate picking the highest / lowest unconsumed pair
            if swapMinMax:
                w = max(np.compress(np.not_equal(IDONE,IDONE*0),range(len(HEADT))))
            else:
                w = min(np.compress(np.not_equal(IDONE,IDONE*0),range(len(HEADT))))
            swapMinMax = not swapMinMax
            po = INTER['h'][HEADT[w]]; pe = INTER['t'][HEADT[w]]; IDONE[w] = 0
            polyA.append(po)
        # ~~> Finding the sources
        TAILT = np.argsort( INTER['t'] ) # knowing that INTER[TAILT] is sorted by the tail
        TLINK = np.searchsorted(INTER['t'][TAILT],INTER['h'][TAILT]) # INTER['h'][HEADT] is sorted
        # ... TLINK[w] for w in polyZ gives you the position of polyZ[w] in INTER['t'][TAILT]
        polyGones = []
        # ~~> Finding the sources of non-looping lines
        TAILS = np.searchsorted(INTER['t'][TAILT],polyZ)
        for w in TAILS:
            p = [INTER['t'][TAILT[w]]]
            while True:
                # prepend heads while the backward chain links up
                if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                    po = [INTER['h'][TAILT][w]]
                    po.extend(p)
                    p = po; w = TLINK[w]
                    if TLINK[w] < len(INTER):
                        if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                            continue
                po = [INTER['h'][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        # ~~> Finding the sources of looping lines
        LOOPS = np.searchsorted(INTER['t'][TAILT],polyL)
        for w in LOOPS:
            p = [INTER['t'][TAILT[w]]]
            while True:
                if INTER['h'][TAILT][w] == INTER['t'][TAILT][TLINK[w]]:
                    po = [INTER['h'][TAILT][w]]
                    po.extend(p)
                    p = po; w = TLINK[w]
                    if INTER['h'][TAILT][w] != p[len(p)-1]:
                        continue
                po = [INTER['h'][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        return polyGones

    def joinSegments(self,polyLines):
        """Merge poly-lines end-to-end wherever one line's tail equals
        another's head; closed lines are moved straight to the result."""
        polyGones = []
        maxbar = max(len(polyLines),1)
        pbar = ProgressBar(maxval=maxbar).start()
        while polyLines != []:
            # ~~> starting point
            e = polyLines[0]
            le = len(e)
            a,b = e[0],e[len(e)-1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0:len(e)]) # /!\ here you keep the duplicated point
                polyLines.pop(0)
                continue
            # ~~> iterative process
            for ei,iline in zip(polyLines[1:],range(len(polyLines))[1:]):
                # ~~> merging the two segments
                if b == ei[0]:
                    polyLines[0] = e[0:len(e)] # copy !
                    polyLines[0].extend(ei[1:])
                    polyLines.pop(iline)
                    break
                if a == ei[len(ei)-1]:
                    polyLines[0] = ei[0:len(ei)] # copy !
                    polyLines[0].extend(e[1:])
                    polyLines.pop(iline)
                    break
            # ~~> completed search
            if le == len(polyLines[0]):
                # nothing merged this pass: line is final
                polyGones.append(e[0:len(e)])
                polyLines.pop(0)
            pbar.update(maxbar-len(polyLines))
        pbar.finish()
        return polyGones

    def tetrisOddSegments(self,main,odds):
        """Stitch the 'odd' external segments into the main poly-lines, then
        remove immediate back-and-forth (over-constrained) node sequences."""
        polyGones = []
        lo = len(odds)
        while main != []:
            # ~~> starting point
            e = main[0]
            le = len(e)
            a,b = e[0],e[len(e)-1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(e[0:len(e)]) # /!\ here you keep the duplicated point
                main.pop(0)
                continue
            # ~~> iterative process
            for ei,iline in zip(odds,range(len(odds))):
                # ~~> merging the two segments
                if b == ei[0]:
                    main[0] = e[0:len(e)]
                    main[0].extend(ei[1:])
                    odds.pop(iline)
                    break
                if a == ei[len(ei)-1]:
                    main[0] = ei[0:len(ei)]
                    main[0].extend(e[1:])
                    odds.pop(iline)
                    break
            # ~~> completed search
            if le == len(main[0]):
                polyGones.append(e[0:len(e)])
                main.pop(0)
        # ~~> removing the over-constrained elements
        for p in polyGones:
            if len(p) > 3:
                j = 2
                while j < len(p):
                    # pattern a,b,a collapses to a
                    if p[j-2] == p[j]:
                        p.pop(j-2)
                        p.pop(j-2)
                    j += 1
        return polyGones

    # Filter poly according to IPOBO on that part.
    #  ~> gloseg: is the ensemble of either closed islands or
    #     open external boundary segments
    # Note: filtering now seems to mean that to have done a lot of work for nothing
    def globalSegments(self,poly):
        """Keep only the sub-segments of each poly-line whose nodes are on
        the global external boundary (IPOBO != 0)."""
        gloseg = []
        for p in poly:
            pA = p[0]; pZ = p[len(p)-1]; closed = False
            if pA == pZ and self.IPOBO[pA] != 0: closed = True
            iA = 0; iZ = 0
            ploseg = []
            for i in p:
                if self.IPOBO[i] != 0: # moves the counter along for external points
                    iZ += 1
                elif iZ != 0: # you have just found the end of an external segment
                    ploseg.append(p[iA:iA+iZ])
                    iA += iZ+1
                    iZ = 0
                else:
                    iA += 1
            if iZ != 0:
                if closed and len(ploseg) > 0:
                    # wrap-around: join the trailing run with the leading one
                    i = p[iA:iA+iZ]
                    i.extend(ploseg[0][1:]) # remove duplicate
                    ploseg[0] = i
                else:
                    ploseg.append(p[iA:iA+iZ])
            gloseg.extend(ploseg)
        return gloseg

    def putContent(self):
        """Drive the whole split: join boundary segments, write optional i2s
        check files, split and write the SELAFIN per part, then (optionally)
        build IFAPAR/NPTIR and write the per-part CONLIM files."""
        # ~~> Extension for parallel file names
        fmtn = '00000' + str(self.NPARTS-1)
        fmtn = fmtn[len(fmtn)-5:]
        print '\n... Split the boundary connectivity'
        # ~~> Assemble internal and external segments
        polyCLOSED = dict([ (i,[]) for i in range(self.NPARTS) ])
        polyFILTER = dict([ (i,[]) for i in range(self.NPARTS) ])
        polyGLOSED = []
        for part in range(self.NPARTS): # this could be done in parallel
            print ' +> Joining up boundary segments for part: ',part+1
            # ~~> Joining up boundaries for sub-domains
            print ' ~> main internal segments'
            self.PINTER[part] = self.joinPairs(self.PINTER[part])
            print ' ~> main external segments'
            polyHALO = self.joinPairs(self.PNHALO[part])
            polyHALO.extend(self.PINTER[part])
            polyHALO = self.joinSegments(polyHALO)
            print ' ~> odd segments'
            polyODDS = self.joinSegments(self.PNODDS[part])
            print ' ~> stitching with the odd ones'
            polyGones = self.tetrisOddSegments(polyHALO,polyODDS)
            print ' ~> final closure'
            polyCLOSED[part] = self.joinSegments(polyGones)
            # ~~> Building up the entire picture
            polyFILTER[part] = self.globalSegments(polyCLOSED[part])
            polyGLOSED.extend( polyFILTER[part] )
        # ~~> Joining up boundaries for the global domain (Note: seems counter productive but is not)
        polyGLOSED = self.joinSegments(polyGLOSED)
        if self.isDOMAIN != '':
            print '\n... Printing the domain split into a series of i2s files'
            # ~~> Convert node numbers into x,y
            for part in range(self.NPARTS):
                print ' +> part ',part+1,' of ',self.NPARTS
                polyXY = []
                for pg in range(len(polyCLOSED[part])):
                    pxy = []
                    for pt in range(len(polyCLOSED[part][pg])):
                        n = polyCLOSED[part][pg][pt]
                        pxy.append([ self.slf.MESHX[n],self.slf.MESHY[n] ])
                    polyXY.append(pxy)
                # ~~> Write polygons to double check
                fmti = '00000' + str(part)
                fmti = fmti[len(fmti)-5:]
                fileName = path.join(path.dirname(self.slf.file['name']),self.isDOMAIN+fmtn+'-'+fmti+'.i2s')
                putInS(fileName,[],'i2s',polyXY)
            # ~~> Convert node numbers into x,y
            polyXY = []
            for pg in range(len(polyGLOSED)):
                pxy = []
                for pt in range(len(polyGLOSED[pg])):
                    n = polyGLOSED[pg][pt]
                    pxy.append([ self.slf.MESHX[n],self.slf.MESHY[n] ])
                polyXY.append(pxy)
            # ~~> Write polygons to double check
            fileName = path.join(path.dirname(self.slf.file['name']),self.isDOMAIN+'.i2s')
            putInS(fileName,[],'i2s',polyXY)
        print '\n... Final check to the element partitioning'
        for part in range(self.NPARTS): # this could be done in parallel
            self.KSPLIT = self.resetPartition(part,self.PINTER[part],self.KSPLIT)
        if self.isDOMAIN != '': # ~~> This is optional
            print '\n... Printing the domain split into a SELAFIN'
            fileRoot,fileExts = path.splitext(self.slf.file['name'])
            self.slf.fole.update({ 'hook': open(fileRoot+'_PROCS'+fileExts,'wb') })
            self.slf.appendHeaderSLF()
            self.slf.appendCoreTimeSLF(0)
            VARSOR = self.slf.getVALUES(0)
            # every variable is replaced by the node partition for visual checking
            for v in range(self.slf.NVAR): VARSOR[v] = self.NSPLIT
            self.slf.appendCoreVarsSLF(VARSOR)
            self.slf.fole['hook'].close()
        print '\n... Storing the global liquid boundary numbering (NUMLIQ)'
        # ~~> Implying NUMLIQ and the number NFRLIQ based on the joined-up lines
        self.clm.setNUMLIQ(polyGLOSED)
        print '\n... Split the mesh connectivity'
        # ~~> Preliminary set up for LIKLE, KNOLG and KEMLG by parts
        LIKLE = dict([ (i,[]) for i in range(self.NPARTS) ])
        KELLG = dict([ (i,[]) for i in range(self.NPARTS) ])
        KNOLG = dict([ (i,[]) for i in range(self.NPARTS) ])
        for part in range(self.NPARTS):
            print ' +> re-ordering IKLE for part ',part+1
            LIKLE[part],KELLG[part],KNOLG[part] = self.getIKLE(part)
        # ~~> CONLIM file: Preliminary set up of IFAPAR and ISEG for all parts
        IFAPAR = dict([ (i,{}) for i in range(self.NPARTS) ])
        ISEG = {}
        # Organising ISEG for easier call: part 1
        for part in range(self.NPARTS):
            for i in polyFILTER[part]:
                if i[0] == i[len(i)-1]: continue
                # /!\ you are here adding one !
                if i[0] in ISEG: ISEG[i[0]].update({ part:i[1]+1 })
                else: ISEG.update({ i[0]:{ part:i[1]+1 } })
                if i[len(i)-1] in ISEG: ISEG[i[len(i)-1]].update({ part:-i[len(i)-2]-1 })
                else: ISEG.update({ i[len(i)-1]:{ part:-i[len(i)-2]-1 } })
        # Switching parts of ISEG for final call: part 2
        for i in ISEG:
            if len(ISEG[i]) != 2:
                print '... You have a boundary node surounded with more than two boundary segments: ',i
                sys.exit(1)
            parts = ISEG[i].keys()
            ISEG[i] = { parts[0]:ISEG[i][parts[1]], parts[1]:ISEG[i][parts[0]] }
        # ~~> CONLIM file: Preliminary set up of NPTIR for all parts
        NPTIR = dict([ (i,{}) for i in range(self.NPARTS) ])
        for part in range(self.NPARTS):
            for p in self.PINTER[part]:
                NPTIR[part].update( dict([ (i,[]) for i in p ]) )
        parts = range(self.NPARTS)
        while parts != []:
            part = parts[0]
            parts.pop(0)
            # cross-reference interface nodes shared between pairs of parts
            for ip in NPTIR[part]:
                for ipart in parts:
                    if ip in NPTIR[ipart]:
                        NPTIR[part][ip].append(ipart)
                        NPTIR[ipart][ip].append(part)
        print '... Split of the SELAFIN file'
        for part in range(self.NPARTS):
            fmti = '00000' + str(part)
            fmti = fmti[len(fmti)-5:]
            print ' +> part ',part+1,' of ',self.NPARTS
            self.slfn.IKLE2 = LIKLE[part]
            self.slfn.NELEM2 = len(LIKLE[part])
            self.slfn.NPOIN2 = len(KNOLG[part])
            # ~~> IPARAM has two new values: 8:NPTFR and 9:NPTIR
            self.slfn.IPARAM[7] = len(np.unique(np.concatenate(polyFILTER[part])))
            self.slfn.IPARAM[8] = len(NPTIR[part])
            # ~~> IPOBO (or IRAND) converted into KNOLG[part]
            self.slfn.IPOBO = KNOLG[part]+1
            print ' ~> filtering the MESH'
            # ~~> GEO file: MESH coordinates
            self.slfn.MESHX = np.zeros(self.slfn.NPOIN2,dtype=np.float32)
            self.slfn.MESHY = np.zeros(self.slfn.NPOIN2,dtype=np.float32)
            self.slfn.MESHX = self.slf.MESHX[KNOLG[part]]
            self.slfn.MESHY = self.slf.MESHY[KNOLG[part]]
            # ~~> GEO file: File names
            fileRoot,fileExts = path.splitext(self.slf.file['name'])
            self.slfn.file['name'] = fileRoot+fmtn+'-'+fmti+fileExts
            # ~~> GEO file: Printing
            print ' ~> printing: ',self.slfn.file['name']
            self.slfn.fole.update({ 'hook': open(self.slfn.file['name'],'wb') })
            self.slfn.appendHeaderSLF()
            LVARSOR = np.zeros((self.slfn.NVAR,self.slfn.NPOIN2),dtype=np.float32)
            for t in range(len(self.slf.tags['times'])):
                self.slfn.appendCoreTimeSLF(t)
                VARSOR = self.slf.getVALUES(t)
                for v in range(self.slfn.NVAR): LVARSOR[v] = VARSOR[v][KNOLG[part]]
                self.slfn.appendCoreVarsSLF(LVARSOR)
            self.slfn.fole['hook'].close()
        if not self.isCONLIM: return
        print '\n... Connect elements across internal boundaries (IFAPAR)'
        for part in range(self.NPARTS):
            print ' +> part ',part+1,' of ',self.NPARTS
            # ~~> CONLIM file: Preliminary set up of PEHALO elements accross internal boundaries
            PEHALO = {}; SEHALO = {}
            # Step 1: find out about the primary elements and loop through IKLE
            self.NSPLIT *= 0
            MASKER = NPTIR[part].keys()
            self.NSPLIT[MASKER] += 1
            print ' ~> Assembling primary elements with other side'
            # Sub Step 1: Assembling all edges from the other sides
            maxbar = 0; ibar = 0
            for ip in range(self.NPARTS): maxbar += len(LIKLE[ip])
            pbar = ProgressBar(maxval=maxbar).start()
            for otherpart in range(self.NPARTS):
                if otherpart == part: continue
                # all parts are still positive at this stage
                for k in range(len(LIKLE[otherpart])):
                    ibar += 1
                    e = self.slf.IKLE[KELLG[otherpart][k]]
                    if np.count_nonzero( self.NSPLIT[e] ) < 2: continue
                    for p1,p2 in zip([1,2,0],[0,1,2]): # reverse order because looking from the other side
                        if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                            if not (e[p1],e[p2]) in PEHALO: PEHALO.update({ (e[p1],e[p2]):[0,[]] })
                            PEHALO[(e[p1],e[p2])][1].append(k)
                            PEHALO[(e[p1],e[p2])][1].append(otherpart)
                    pbar.update(ibar)
            # Sub Step 2: Assembling all edges from the primary side (there are three times more of them)
            # NOTE(review): nesting below reconstructed from mangled source — verify
            for k in range(len(LIKLE[part])):
                ibar += 1
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if np.count_nonzero( self.NSPLIT[e] ) < 2: continue
                for p1,p2,p3 in zip([0,1,2],[1,2,0],[2,0,1]):
                    if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                        if (e[p1],e[p2]) in PEHALO: # the good side opposes the dark side
                            PEHALO[(e[p1],e[p2])][0] = k
                            if self.NSPLIT[e[p3]] == 0: self.NSPLIT[e[p3]] = -1
                            if self.NSPLIT[e[p3]] == -1:
                                if not (e[p1],e[p3]) in SEHALO: SEHALO.update({ (e[p1],e[p3]):[] })
                                SEHALO[(e[p1],e[p3])].append(k)
                                if not (e[p2],e[p3]) in SEHALO: SEHALO.update({ (e[p2],e[p3]):[] })
                                SEHALO[(e[p2],e[p3])].append(k)
                            else: # self.NSPLIT[e[p3]] must be 2 !
                                if not (e[p3],e[p1]) in SEHALO: SEHALO.update({ (e[p3],e[p1]):[] })
                                if k not in SEHALO[(e[p3],e[p1])]: SEHALO[(e[p3],e[p1])].append(k)
                                if not (e[p2],e[p3]) in SEHALO: SEHALO.update({ (e[p2],e[p3]):[] })
                                if k not in SEHALO[(e[p2],e[p3])]: SEHALO[(e[p2],e[p3])].append(k)
                            if self.KSPLIT[j] >= 0: self.KSPLIT[j] = -(self.KSPLIT[j]+1) # /!\ This is very dangerous but necessary
                pbar.update(ibar)
            pbar.finish()
            # Sub Step 3: Final clean up of the other side ? no need but check later for (ei)[0] == 0
            # Step 2: find out about the secondary elements on IKLE ( local LIKLE ? )
            print ' ~> Assembling secondary elements of that side'
            pbar = ProgressBar(maxval=len(LIKLE[part])).start()
            for k in range(len(LIKLE[part])):
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if self.KSPLIT[j] != part: continue
                if np.count_nonzero( self.NSPLIT[e] ) < 2: continue
                for i in [0,1,2]:
                    ii = (i+1)%3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[ii],e[i]) in SEHALO: SEHALO[(e[ii],e[i])].append(k) # opposite orientation
                    ii = (i+2)%3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (e[i],e[ii]) in SEHALO: SEHALO[(e[i],e[ii])].append(k) # opposite orientation
                if self.KSPLIT[j] < 0: self.KSPLIT[j] = -self.KSPLIT[j] - 1 # /!\ back to a safe place
                pbar.update(k)
            pbar.finish()
            # Step 3: finally cross reference information between SEHALO and PEHALO
            print ' ~> Combining sides surrounding the halo-elements'
            for ie in PEHALO:
                if PEHALO[ie][0] == 0: continue
                k = PEHALO[ie][0] # element number in its local part numbering
                if not k in IFAPAR[part]: IFAPAR[part].update({ k:[-2,-1,-2,-1,-2,-1] })
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                for p1,p2 in zip([0,1,2],[1,2,0]):
                    if (e[p1],e[p2]) in SEHALO:
                        if len(SEHALO[(e[p1],e[p2])]) > 1:
                            if SEHALO[(e[p1],e[p2])][0] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p1],e[p2])][1]
                            if SEHALO[(e[p1],e[p2])][1] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p1],e[p2])][0]
                            IFAPAR[part][k][1+2*p1] = part
                    if (e[p2],e[p1]) in SEHALO:
                        if len(SEHALO[(e[p2],e[p1])]) > 1:
                            if SEHALO[(e[p2],e[p1])][0] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p2],e[p1])][1]
                            if SEHALO[(e[p2],e[p1])][1] == k: IFAPAR[part][k][2*p1] = SEHALO[(e[p2],e[p1])][0]
                            IFAPAR[part][k][1+2*p1] = part
                    if ie == (e[p1],e[p2]):
                        IFAPAR[part][k][2*p1] = PEHALO[ie][1][0]
                        IFAPAR[part][k][1+2*p1] = PEHALO[ie][1][1]
        # ~~> CONLIM file: Write to file ... pfuuuuuh ... this is it !
        print '\n... Split of the CONLIM files'
        for part in range(self.NPARTS):
            fmti = '00000' + str(part)
            fmti = fmti[len(fmti)-5:]
            print ' +> part: ',part+1,' of ',self.NPARTS
            # ~~> CONLIM file: Set the filter
            INDEX = np.zeros_like(self.clm.INDEX,dtype=np.int)
            for contour in polyFILTER[part]:
                # ~~> Closed contour: no need to change ISEG
                if contour[0] == contour[len(contour)-1]:
                    for c in contour[1:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c]+1
                # ~~> Open contour: need to change ISEG with neighbours
                else:
                    for c in contour[0:]: INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c]+1
                    iA = self.clm.KFRGL[contour[0]]
                    self.clm.POR['is'][iA] = ISEG[contour[0]][part]
                    self.clm.POR['xs'][iA] = self.slf.MESHX[abs(ISEG[contour[0]][part])-1] # /!\ MESHX start at 0
                    self.clm.POR['ys'][iA] = self.slf.MESHY[abs(ISEG[contour[0]][part])-1] # /!\ MESHY start at 0
                    iA = self.clm.KFRGL[contour[len(contour)-1]]
                    self.clm.POR['is'][iA] = ISEG[contour[len(contour)-1]][part]
                    self.clm.POR['xs'][iA] = self.slf.MESHX[abs(ISEG[contour[len(contour)-1]][part])-1]
                    self.clm.POR['ys'][iA] = self.slf.MESHY[abs(ISEG[contour[len(contour)-1]][part])-1]
            self.clm.INDEX = INDEX
            # ~~> CONLIM file: Set the NPTIR and CUTs
            self.clm.NPTIR = NPTIR[part]
            # ~~> CONLIM file: Set the IFAPAR
            self.clm.IFAPAR = IFAPAR[part]
            # ~~> CONLIM file
            fileRoot,fileExts = path.splitext(self.clm.fileName)
            print ' ~> printing: ',fileRoot+fmtn+'-'+fmti+fileExts
            self.clm.putContent(fileRoot+fmtn+'-'+fmti+fileExts)
        return
class ECMWF():
    """Download an ECMWF dataset through the (presumably ECMWF Web API)
    `Connection` client and convert it to a 2D SELAFIN file.

    NOTE(review): recovered from whitespace-mangled source; statement
    nesting reconstructed from syntax.
    """

    def __init__(self, dataset, dates, request):
        # Submits the retrieval request and blocks until the remote job is ready.
        # dataset: remote dataset name; dates: pair of datetime tuples;
        # request: dict forwarded to the server (must contain 'target').
        # ~~> inheritence
        self.slf2d = SELAFIN('')        # surface
        self.slf2d.DATETIME = dates[0]
        # ~> Initialisation
        self.moddates = [datetime(*dates[0]), datetime(*dates[1])]
        status = ''
        self.request = request
        # ~> Establish connection
        self.connection = Connection(config['email'], config['key'], quiet=True, verbose=False)
        # ~> Verify connection
        user = self.connection.call("%s/%s" % (config['url'], "who-am-i"))
        print ' ~> access through username: %s\n' % (
            user["full_name"] or "user '%s'" % user["uid"], )
        # ~> Request dataset
        self.connection.submit("%s/%s/requests" % (config['url'], dataset), request)
        status = self.connection.status
        print ' ~> request has been', status
        # ~> Wait for remote processing
        while not self.connection.ready():
            if status != self.connection.status:
                status = self.connection.status
                print ' ~> request remains', status, '...'
            self.connection.wait()
        # ~> Request completed
        print ' ~> request is now', self.connection.status
        self.connection.cleanup()

    def downloadECMWF(self):
        """Stream the prepared result file to request['target'] in 1 MiB
        chunks; retries the whole download up to 3 times on a short read."""
        result = self.connection.result()
        fileName = self.request.get("target")
        # ~> tries connecting 3 times before stopping
        tries = 0
        while True:
            # ~> downloading file by blocks
            http = urllib2.urlopen(result["href"])
            f = open(fileName, "wb")
            ibar = 0
            pbar = ProgressBar(maxval=result["size"]).start()
            while True:
                chunk = http.read(1024 * 1024)
                if not chunk:
                    break
                f.write(chunk)
                ibar += len(chunk)
                pbar.update(ibar)
            f.flush()
            f.close()
            pbar.finish()
            # ~> have I got everything ?
            if ibar == result["size"]:
                break
            if tries == 3:
                print " ... exhausted the number of download trials.\nYou may wish to attempt this again later."
                sys.exit()
            print " ... trying to download the data once more ..."
            tries += 1

    def appendHeaderECMWF(self, ecmwfdata):
        """Build the SELAFIN header (variables, regular lon/lat grid split
        into triangles, boundary numbering, time tags) and write it out."""
        # ~~> variables
        self.slf2d.TITLE = ''
        # assumes the file holds exactly sp/u10/v10/t2m besides the axes — TODO confirm
        self.slf2d.NBV1 = len(ecmwfdata.variables) - 3  # less longitude, latitude and time
        self.slf2d.NVAR = self.slf2d.NBV1
        self.slf2d.VARINDEX = range(self.slf2d.NVAR)
        self.slf2d.VARNAMES = ['SURFACE PRESSURE', \
            'WIND VELOCITY U ','WIND VELOCITY V ', \
            'AIR TEMPERATURE ']
        self.slf2d.VARUNITS = ['UI              ', \
            'M/S             ','M/S             ', \
            'DEGREES         ']
        # ~~> 2D grid
        x = ecmwfdata.variables['longitude'][:]
        NX1D = len(x)
        y = ecmwfdata.variables['latitude'][:]
        NY1D = len(y)
        # nodes ordered column-major: all latitudes of one longitude first
        self.slf2d.MESHX = np.tile(x, NY1D).reshape(NY1D, NX1D).T.ravel()
        self.slf2d.MESHY = np.tile(y, NX1D)
        # ~~> lat,lon correction
        for i in range(NX1D):
            if (self.slf2d.MESHX[i] > 180):
                self.slf2d.MESHX[i] = self.slf2d.MESHX[i] - 360.0
        #for i in range(2172,NY1D):
        #   self.slf2d.MESHY[i] = 47.0 + ( i-2172 )/18.0
        self.slf2d.NPLAN = 1
        self.slf2d.NDP2 = 3
        self.slf2d.NDP3 = self.slf2d.NDP2
        self.slf2d.NPOIN2 = NX1D * NY1D
        self.slf2d.NPOIN3 = self.slf2d.NPOIN2
        # two triangles per grid cell
        self.slf2d.NELEM2 = 2 * (NX1D - 1) * (NY1D - 1)
        self.slf2d.NELEM3 = self.slf2d.NELEM2
        self.slf2d.IPARAM = [0, 0, 0, 0, 0, 0, 1, 0, 0, 0]
        # ~~> Connectivity
        ielem = 0
        pbar = ProgressBar(maxval=self.slf2d.NELEM3).start()
        self.slf2d.IKLE3 = np.zeros((self.slf2d.NELEM3, self.slf2d.NDP3), dtype=np.int)
        for i in range(1, NX1D):
            for j in range(1, NY1D):
                ipoin = (i - 1) * NY1D + j - 1
                # ~~> first triangle
                self.slf2d.IKLE3[ielem][0] = ipoin
                self.slf2d.IKLE3[ielem][1] = ipoin + NY1D
                self.slf2d.IKLE3[ielem][2] = ipoin + 1
                ielem = ielem + 1
                pbar.update(ielem)
                # ~~> second triangle
                self.slf2d.IKLE3[ielem][0] = ipoin + NY1D
                self.slf2d.IKLE3[ielem][1] = ipoin + NY1D + 1
                self.slf2d.IKLE3[ielem][2] = ipoin + 1
                ielem = ielem + 1
                pbar.update(ielem)
        pbar.finish()
        # ~~> Boundaries
        pbar = ProgressBar(maxval=NX1D + NY1D).start()
        self.slf2d.IPOB3 = np.zeros(self.slf2d.NPOIN3, dtype=np.int)
        # ~~> along the x-axis (lon)
        for i in range(NX1D):
            ipoin = i * NY1D
            self.slf2d.IPOB3[ipoin] = i + 1
            ipoin = i * NY1D - 1
            self.slf2d.IPOB3[ipoin] = 2 * NX1D + (NY1D - 2) - i
            pbar.update(i)
        # ~~> along the y-axis (alt)
        for i in range(1, NY1D):
            ipoin = i
            self.slf2d.IPOB3[ipoin] = 2 * NX1D + 2 * (NY1D - 2) - i + 1
            ipoin = NY1D * (NX1D - 1) + i
            self.slf2d.IPOB3[ipoin] = NX1D + i
            pbar.update(i + NX1D)
        pbar.finish()
        # ~~~~ Time records ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
        ATs = ecmwfdata.variables['time'][:]
        # source 'time' is presumably in hours — converted to seconds here
        self.slf2d.tags = { 'times': 3600 * (ATs - ATs[0]) }  # time record in hours
        # self.slf2d.DATETIME = period[0] ... already set
        self.slf2d.appendHeaderSLF()

    def appendCoreTimeECMWF(self, t):
        """Write the time stamp of frame t."""
        self.slf2d.appendCoreTimeSLF(t)

    def appendCoreVarsECMWF(self, ecmwfdata, itime):
        """Write the four variables of frame itime, un-packing each with the
        netCDF scale_factor/add_offset attributes."""
        # Note: this is how you get to the attributes ...
        #   ecmwfdata.variables['sp'].ncattrs()
        # in particular ...
        #   ecmwfdata.variables['sp'].units
        #   ecmwfdata.variables['sp'].missing_value
        # ~~> SURFACE PRESSURE == 'sp'
        var2d = np.swapaxes(ecmwfdata.variables['sp'][itime][:], 0, 1).ravel()
        varof = ecmwfdata.variables['sp'].add_offset
        varsf = ecmwfdata.variables['sp'].scale_factor
        #print ecmwfdata.variables['sp'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> WIND VELOCITY U == 'u10'
        var2d = np.swapaxes(ecmwfdata.variables['u10'][itime][:], 0, 1).ravel()
        varof = ecmwfdata.variables['u10'].add_offset
        varsf = ecmwfdata.variables['u10'].scale_factor
        #print ecmwfdata.variables['u10'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> WIND VELOCITY V == 'v10'
        var2d = np.swapaxes(ecmwfdata.variables['v10'][itime][:], 0, 1).ravel()
        varof = ecmwfdata.variables['v10'].add_offset
        varsf = ecmwfdata.variables['v10'].scale_factor
        #print ecmwfdata.variables['v10'].units
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof])
        # ~~> AIR TEMPERATURE == 't2m'
        var2d = np.swapaxes(ecmwfdata.variables['t2m'][itime][:], 0, 1).ravel()
        varof = ecmwfdata.variables['t2m'].add_offset
        varsf = ecmwfdata.variables['t2m'].scale_factor
        self.slf2d.appendCoreVarsSLF([varsf * var2d + varof - 273.15 ])  # Kelvin to Celsius

    def putContent(self, fileName, showbar=True):
        """Convert the downloaded netCDF file (request['target']) into the
        big-endian single-precision SELAFIN file `fileName`."""
        # ~~> netcdf reader
        ecmwfdata = netcdf.netcdf_file(self.request.get("target"), 'r')
        # ~~> new SELAFIN writer
        self.slf2d.fole = {}
        self.slf2d.fole.update({'hook': open(fileName, 'wb')})
        self.slf2d.fole.update({'name': fileName})
        self.slf2d.fole.update({'endian': ">"})      # big endian
        self.slf2d.fole.update({'float': ('f', 4)})  # single precision
        print ' +> Write SELAFIN header'
        self.appendHeaderECMWF(ecmwfdata)
        print ' +> Write SELAFIN core'
        ibar = 0
        if showbar:
            pbar = ProgressBar(maxval=len(self.slf2d.tags['times'])).start()
        for t in range(len(self.slf2d.tags['times'])):
            self.appendCoreTimeECMWF(t)
            self.appendCoreVarsECMWF(ecmwfdata, ibar)
            ibar += 1
            if showbar:
                pbar.update(ibar)
        self.slf2d.fole['hook'].close()
        if showbar:
            pbar.finish()
# Script tail: extract frame 0 from the source SELAFIN (`slf`), clamp
# salinity/temperature to be non-negative, re-order planes and write the
# result as the initial-condition file `ini`.
# NOTE(review): `slf`, `support3d`, `vars`, `zeros`, `bat` and `ini` are
# defined earlier in the full script, outside this extract.
print ' +> extracting variables'
data = getValueHistorySLF( slf.file,slf.tags,[0],support3d,slf.NVAR,slf.NPOIN3,slf.NPLAN,vars )
# special case for TEMPERATURE and SALINITY
data[3] = np.maximum( data[3],zeros )
data[4] = np.maximum( data[4],zeros )
print ' +> correcting variables'
# duplicate values below bottom
# reshape from (NVAR, NPOIN2, NPLAN) to plane-major (NVAR, NPOIN3) ordering
d = np.reshape(np.transpose(np.reshape(np.ravel(data),(ini.NVAR,ini.NPOIN2,ini.NPLAN)),(0,2,1)),(ini.NVAR,ini.NPOIN3))
# below-bottom correction kept disabled:
#for ipoin in range(ini.NPOIN2):
#   for iplan in range(ini.NPLAN-1,0,-1):
#      for ivar in range(ini.NVAR)[1:]: # except for Z
#         if bat[ipoin] > d[0][ipoin+(iplan-1)*ini.NPOIN2]:
#            d[ivar][ipoin+(iplan-1)*ini.NPOIN2] = d[ivar][ipoin+iplan*ini.NPOIN2]
#         if d[3][ipoin+(iplan-1)*ini.NPOIN2] < 28.0:
#            d[3][ipoin+(iplan-1)*ini.NPOIN2] = max(d[3][ipoin+iplan*ini.NPOIN2],28.0)
print ' +> writing variables'
ini.appendCoreTimeSLF( 0 )
ini.appendCoreVarsSLF( d )
# Close iniFile
ini.fole['hook'].close()
# <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
# ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print '\n\nMy work is done\n\n'
sys.exit(0)
atmDATES = slf.DATETIME atmTIMES = slf.tags['times'] atm.tags['times'] = slf.tags['times'] # VARIABLE extraction vars = subsetVariablesSLF( "SURFACE PRESSURE: ;WIND VELOCITY U: ;WIND VELOCITY V: ;AIR TEMPERATURE: ", slf.VARNAMES) # Read / Write data, one time step at a time to support large files pbar = ProgressBar(maxval=len(slf.tags['times'])).start() for t in range(len(slf.tags['times'])): data = getValueHistorySLF(slf.file, slf.tags, [t], support3d, slf.NVAR, slf.NPOIN3, slf.NPLAN, vars) # special cases ? atm.appendCoreTimeSLF(t) atm.appendCoreVarsSLF( np.reshape( np.transpose( np.reshape(np.ravel(data), (atm.NVAR, atm.NPOIN2, atm.NPLAN)), (0, 2, 1)), (atm.NVAR, atm.NPOIN3))) pbar.update(t) pbar.finish() # Close atmFile atm.fole['hook'].close() # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< # ~~~~ Jenkins' success message ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print '\n\nMy work is done\n\n'
#print ' +> Set SELAFIN times and cores' # these two lists are empty after constructor is instantiated slf2d.tags['cores'].append(0) slf2d.tags['times'].append(0) #slf2d.tags = { 'times':[0] } # time (sec) #slf2d.DATETIME = sel.DATETIME slf2d.DATETIME = [2015, 1, 1, 1, 1, 1] #slf2d.tags = { 'cores':[long(0)] } # time frame #print ' +> Write SELAFIN headers' slf2d.fole.update({'hook': open(slf_file, 'w')}) slf2d.fole.update({'name': 'Converted from gmsh by pputils'}) slf2d.fole.update({'endian': ">"}) # big endian slf2d.fole.update({'float': ('f', 4)}) # single precision slf2d.appendHeaderSLF() slf2d.appendCoreTimeSLF(0) slf2d.appendCoreVarsSLF([z]) # to write the *.cli file # to create the *.cli output file cli_file = slf_file[0:len(slf_file) - 4] + str(".cli") fout = open(cli_file, "w") for i in range(len(b)): fout.write( str('2 2 2 0.000 0.000 0.000 0.000 2 0.000 0.000 0.000 ') + str(slf2d.IPOB2[i] + 1) + " " + str(i + 1) + " " + "\n")
class splitSELAFIN:
    """Split a global SELAFIN mesh (and its CONLIM boundary file) into
    NPARTS sub-domain files, as a partitioner (partel-like) would.

    The split is driven either by a METIS element sequence file or by a
    node-based 'PROCESSORS' variable stored in the SELAFIN file itself.
    Note: Python 2 code — print statements, dict.keys() as lists.
    """

    def __init__(self, SLFfileName, CLMfileName, SEQfileName="", splitCONLIM=False, DOMfileRoot=""):
        # SLFfileName: global SELAFIN mesh/result file
        # CLMfileName: global CONLIM boundary condition file
        # SEQfileName: optional METIS sequence file (one part id per element)
        # splitCONLIM: also write per-part CONLIM files when True
        # DOMfileRoot: optional root name for i2s/diagnostic outputs
        print "\n... Acquiring global files"
        # ~~> Acquire global CONLIM file
        print " +> CONLIM file"
        self.clm = CONLIM(CLMfileName)
        self.isCONLIM = splitCONLIM
        # ~~> Acquire global SELAFIN file
        print " +> SELAFIN file"
        self.slf = SELAFIN(SLFfileName)
        # ~~> Acquire global SELAFIN file
        if SEQfileName != "":
            print " +> SEQUENCE file"
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromSequence(
                np.array(getFileContent(SEQfileName), dtype="<i4"))
        else:
            self.NPARTS, self.NSPLIT, self.KSPLIT = self.getSplitFromNodeValues(
                "PROCESSORS")
        print "\n... Split by elements in ", self.NPARTS, " parts\n"
        # ~~> Clean inconsistencies in boundary segments
        self.IPOBO, self.NSPLIT, self.KSPLIT = self.setSplitForBoundaries(
            self.NSPLIT, self.clm.KFRGL, self.KSPLIT)
        self.PINTER, self.PNHALO, self.PNODDS = self.setSplitForElements(
            self.IPOBO, self.NPARTS, self.NSPLIT, self.KSPLIT)
        self.slfn = self.copyCommonData()
        # ~~> Optional output file names
        self.isDOMAIN = DOMfileRoot

    # Make a copy of common information for sub-meshes
    def copyCommonData(self):
        """Return a new SELAFIN header carrying the fields shared by every
        sub-mesh (title, time tags, variables, NPLAN/NDP sizes); the
        per-part fields (IKLE2, NPOIN2, MESHX/Y, ...) are set later."""
        SLFn = SELAFIN("")
        # Meta data
        SLFn.TITLE = self.slf.TITLE
        SLFn.file = self.slf.file
        SLFn.IPARAM = self.slf.IPARAM
        # Time
        SLFn.DATETIME = self.slf.DATETIME
        SLFn.tags = self.slf.tags
        # Variables
        SLFn.NBV1 = self.slf.NBV1
        SLFn.VARNAMES = self.slf.VARNAMES
        SLFn.VARUNITS = self.slf.VARUNITS
        SLFn.NBV2 = self.slf.NBV2
        SLFn.CLDNAMES = self.slf.CLDNAMES
        SLFn.CLDUNITS = self.slf.CLDUNITS
        SLFn.NVAR = self.slf.NVAR
        SLFn.VARINDEX = range(self.slf.NVAR)
        # Unchanged numbers
        SLFn.NPLAN = self.slf.NPLAN
        SLFn.NDP2 = self.slf.NDP2
        SLFn.NDP3 = self.slf.NDP3
        return SLFn

    # Split based on a sequence of parts, one for each element (result from METIS)
    def getSplitFromSequence(self, KSPLIT):
        """From a 1-based per-element part sequence, derive (NPARTS,
        node split NSPLIT, element split KSPLIT), both returned 0-based."""
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*KSPLIT)
        NSPLIT = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for part in range(NPARTS):
            k = np.compress(KSPLIT == (part + 1), range(len(self.slf.IKLE)))
            NSPLIT[self.slf.IKLE[k]] = KSPLIT[k]
        return NPARTS, NSPLIT - 1, KSPLIT - 1

    # Split based on the variable PROCESSORS, defined at the nodes
    def getSplitFromNodeValues(self, var):
        """Derive (NPARTS, NSPLIT, KSPLIT) from a node variable (e.g.
        'PROCESSORS'); an element belongs to the smallest part id found
        among its three nodes (np.minimum over the IKLE columns)."""
        # ~~> Filter for 'PROCESSORS' as input to the getVariablesAt method
        i, vn = subsetVariablesSLF(var, self.slf.VARNAMES)
        if i == []:
            print "... Could not find ", var, ", you may need another split method"
            sys.exit(1)
        # ~~> NSPLIT is the integer value of the variable PROCESSORS (time frame 0)
        NSPLIT = np.array(self.slf.getVariablesAt(0, i)[0], dtype=np.int)
        # ~~> NPARTS is the number of parts /!\ does not check continuity vs. missing parts
        NPARTS = max(*NSPLIT) + 1  # User numbering NSPLIT starts from 0
        KSPLIT = np.minimum(*(NSPLIT[self.slf.IKLE].T))
        return NPARTS, NSPLIT, KSPLIT

    def setSplitForBoundaries(self, NSPLIT, KFRGL, KSPLIT):
        """Iteratively repair the node/element split so that no boundary
        or internal segment straddles parts inconsistently; returns
        (IPOBO, NSPLIT, KSPLIT) with NSPLIT/KSPLIT modified in place."""
        # ~~> Join up the global boundary nodes with the halo elements
        IPOBO = np.zeros(self.slf.NPOIN2, dtype=np.int)
        IPOBO[KFRGL.keys()] = np.array(
            KFRGL.values(),
            dtype=np.int) + 1  # this is so the nonzero search is easier
        # ~~> Cross check partition quality -- step 1
        # Re-scan until a full pass makes no further correction.
        found = True
        nloop = 0
        while found:
            found = False
            nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if KSPLIT[k] != max(NSPLIT[e]):
                    for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]):
                        if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[
                                e[p2]] != KSPLIT[k]:
                            if IPOBO[e[p1]] != 0 and IPOBO[e[p2]] != 0:
                                print " ~> correcting boundary segment at iteration: ", nloop, (
                                    e[p1],
                                    e[p2],
                                ), k, KSPLIT[k], e, NSPLIT[e]
                                # Re-assign the whole segment to the third node's part
                                NSPLIT[e[p1]] = NSPLIT[e[p3]]
                                NSPLIT[e[p2]] = NSPLIT[e[p3]]
                                KSPLIT[k] = NSPLIT[e[p3]]
                                found = True
        # ~~> Cross check partition quality -- step 2
        found = True
        nloop = 0
        while found:
            found = False
            nloop += 1
            for k in range(len(self.slf.IKLE)):
                e = self.slf.IKLE[k]
                if min(NSPLIT[e]) != max(NSPLIT[e]) and KSPLIT[k] != min(NSPLIT[e]):
                    print " ~> correcting internal segment at iteration: ", nloop, k, KSPLIT[k], e, NSPLIT[e]
                    # Mixed-part element: pull it down to the smallest node part
                    KSPLIT[k] = min(NSPLIT[e])
                    found = True
        return IPOBO, NSPLIT, KSPLIT

    # Split based on the variable PROCESSORS, defined at the nodes
    def setSplitForElements(self, IPOBO, NPARTS, NSPLIT, KSPLIT):
        """Collect, per part, the edge segments of the halo: SINTER
        (internal inter-part edges), SNHALO (consecutive external
        boundary edges) and PNODDS (non-consecutive 'odd' external
        edges); duplicates in SINTER are then pruned."""
        SNHALO = dict([(i, []) for i in range(NPARTS)])
        PNODDS = dict([(i, []) for i in range(NPARTS)])
        SINTER = dict([(i, []) for i in range(NPARTS)])
        # ~~> Internal segments separating parts
        pbar = ProgressBar(maxval=len(self.slf.IKLE)).start()
        for k in range(len(self.slf.IKLE)):
            e = self.slf.IKLE[k]
            # Case 1: you are at an internal boundary element
            if KSPLIT[k] != max(NSPLIT[e]):
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if NSPLIT[e[p1]] != KSPLIT[k] and NSPLIT[e[p2]] != KSPLIT[k]:
                        # Record the edge from both sides (opposite orientations)
                        SINTER[KSPLIT[k]].append((e[p1], e[p2]))
                        SINTER[min(NSPLIT[e[p1]], NSPLIT[e[p2]])].append(
                            (e[p2], e[p1]))
            # Case 2: you may be at an external boundary element
            if np.count_nonzero(IPOBO[e]) > 1:
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if IPOBO[e[p1]] != 0 and IPOBO[
                            e[p2]] != 0:  # multiplier is not possible
                        if IPOBO[e[p1]] + 1 == IPOBO[e[p2]]:
                            SNHALO[KSPLIT[k]].append((e[p1], e[p2]))
                        else:
                            PNODDS[KSPLIT[k]].append([e[p1], e[p2]])
            pbar.update(k)
        pbar.finish()
        # ~~> Clean-up of funny segments looping on themselves
        for part in range(NPARTS):
            # ~~> Quickly checking through to remove duplicate segments
            found = True
            while found:
                found = False
                # Structured array sorted by head so reversed pairs can be matched
                INTER = np.array(SINTER[part], dtype=[("h", int), ("t", int)])
                HEADT = np.argsort(INTER["h"])
                HLINK = np.searchsorted(INTER["h"][HEADT], INTER["t"][HEADT])
                w = 0
                while w < len(HLINK):
                    if HLINK[w] < len(HLINK):
                        if (INTER["h"][HEADT[w]] == INTER["t"][HEADT[HLINK[w]]]
                                and INTER["t"][HEADT[w]] == INTER["h"][HEADT[HLINK[w]]]):
                            print " ~> Removing dupicate segments in part: ", part, SINTER[
                                part][HEADT[w]], SINTER[part][HEADT[HLINK[w]]]
                            # pop the larger index first so the smaller stays valid
                            if HEADT[w] > HEADT[HLINK[w]]:
                                SINTER[part].pop(HEADT[w])
                                SINTER[part].pop(HEADT[HLINK[w]])
                            else:
                                SINTER[part].pop(HEADT[HLINK[w]])
                                SINTER[part].pop(HEADT[w])
                            found = True
                            break
                    w += 1
        return SINTER, SNHALO, PNODDS

    def getIKLE(self, npart):
        """Extract the connectivity of one part: LIKLE (local IKLE with
        renumbered nodes), KELLG (local->global element numbers) and
        KNOLG (local->global node numbers, sorted)."""
        # ~~> get IKLE for that part ... still with global element numbers
        GIKLE = np.compress(self.KSPLIT == npart, self.slf.IKLE, axis=0)
        KELLG = np.compress(self.KSPLIT == npart, range(len(self.slf.IKLE)),
                            axis=0)
        # ~~> KNOLG(NPOIN3) gives the global node number such that
        #     for i = 1,NPOIN3: Fwrite(i) = Fread(KNOLG(i)) and is ordered
        KNOLG, indices = np.unique(np.ravel(GIKLE), return_index=True)
        KNOGL = dict(zip(KNOLG, range(len(KNOLG))))  # inverse map: global -> local
        LIKLE = -np.ones_like(GIKLE, dtype=np.int)
        pbar = ProgressBar(maxval=len(GIKLE)).start()
        for k in range(len(GIKLE)):
            LIKLE[k] = [
                KNOGL[GIKLE[k][0]], KNOGL[GIKLE[k][1]], KNOGL[GIKLE[k][2]]
            ]
            pbar.update(k)
        pbar.finish()
        return LIKLE, KELLG, KNOLG

    def resetPartition(self, part, PINTER, KSPLIT):
        """Diagnose (print only — the re-assignment is commented out)
        elements of other parts that sit wrongly against this part's
        internal interface; returns KSPLIT unchanged in practice."""
        MASKER = np.zeros(self.slf.NPOIN2, dtype=np.int)
        for p in PINTER:
            MASKER[p] = np.arange(len(p)) + 1  # PINTER is ordered
        KIKLE = np.compress(
            np.maximum(*(MASKER[self.slf.IKLE].T)) >= 0,
            range(len(self.slf.IKLE)))
        # KIKLE = np.compress(np.count_nonzero(MASKER[self.slf.IKLE],axis=1)>2,range(len(self.slf.IKLE))) # /!\ does not work ?
        pbar = ProgressBar(maxval=len(KIKLE)).start()
        for k in KIKLE:
            e = self.slf.IKLE[k]
            if np.count_nonzero(MASKER[e]) < 2 or KSPLIT[k] == part:
                continue
            for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                if MASKER[e[p1]] > 0 and MASKER[e[p2]] > 0 and MASKER[
                        e[p2]] > MASKER[e[p1]]:
                    print " ~> Warning for element of part: ", part, "(was:", KSPLIT[
                        k], ") ", k, e
                    # KSPLIT[k] = part
            pbar.update(k)
        pbar.finish()
        return KSPLIT

    def joinPairs(self, polyLines):
        """Chain a bag of directed (head, tail) node pairs into poly-lines.
        Returns a list of node lists, covering both open lines (sources
        found via polyZ) and closed loops (via polyL).
        NOTE(review): relies on np.searchsorted links between the
        head-sorted and tail-sorted views; order of traversal alternates
        min/max seeds (swapMinMax) — do not reorder statements."""
        INTER = np.array(polyLines, dtype=[("h", int), ("t", int)])
        IDONE = np.ones(len(polyLines), dtype=np.int)  # 1 = pair not yet consumed
        polyA = []  # line starting points
        polyZ = []  # line ending points (open lines)
        polyL = []  # loop closing points (closed lines)
        # ~~> Finding the endings
        HEADT = np.argsort(
            INTER["h"])  # knowing that INTER[HEADT] is sorted by the head
        HLINK = np.searchsorted(
            INTER["h"][HEADT], INTER["t"][HEADT])  # INTER['h'][HEADT] is sorted
        # ... HLINK[w] for w in INTER['t'] gives you the position of INTER['t'][w] in INTER['h'][HEADT]
        w = min(np.compress(np.not_equal(IDONE, IDONE * 0), range(len(HEADT))))
        po = INTER["h"][HEADT[w]]
        pe = INTER["t"][HEADT[w]]
        IDONE[w] = 0
        polyA.append(po)
        swapMinMax = True
        while True:
            if HLINK[w] < len(INTER):
                if INTER["t"][HEADT][w] == INTER["h"][HEADT][HLINK[w]]:
                    # follow the chain: this tail is some other pair's head
                    w = HLINK[w]
                    pe = INTER["t"][HEADT][w]
                    IDONE[w] = 0
                    if pe not in polyA:
                        if HLINK[w] < len(INTER):
                            if INTER["t"][HEADT][w] != po and INTER["t"][HEADT][
                                    w] == INTER["h"][HEADT][HLINK[w]]:
                                continue
            if po == pe:
                polyL.append(pe)  # came back to the start: closed loop
            else:
                if pe not in polyZ:
                    polyZ.append(pe)
                else:
                    polyA.append(po)
            if np.count_nonzero(IDONE) == 0:
                break
            # pick the next unconsumed seed, alternating ends
            if swapMinMax:
                w = max(
                    np.compress(np.not_equal(IDONE, IDONE * 0),
                                range(len(HEADT))))
            else:
                w = min(
                    np.compress(np.not_equal(IDONE, IDONE * 0),
                                range(len(HEADT))))
            swapMinMax = not swapMinMax
            po = INTER["h"][HEADT[w]]
            pe = INTER["t"][HEADT[w]]
            IDONE[w] = 0
            polyA.append(po)
        # ~~> Finding the sources
        TAILT = np.argsort(
            INTER["t"])  # knowing that INTER[TAILT] is sorted by the tail
        TLINK = np.searchsorted(
            INTER["t"][TAILT], INTER["h"][TAILT])  # INTER['h'][HEADT] is sorted
        # ... TLINK[w] for w in polyZ gives you the position of polyZ[w] in INTER['t'][TAILT]
        polyGones = []
        # ~~> Finding the sources of non-looping lines
        TAILS = np.searchsorted(INTER["t"][TAILT], polyZ)
        for w in TAILS:
            p = [INTER["t"][TAILT[w]]]
            while True:
                if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                    # walk backwards, prepending heads
                    po = [INTER["h"][TAILT][w]]
                    po.extend(p)
                    p = po
                    w = TLINK[w]
                    if TLINK[w] < len(INTER):
                        if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                            continue
                po = [INTER["h"][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        # ~~> Finding the sources of looping lines
        LOOPS = np.searchsorted(INTER["t"][TAILT], polyL)
        for w in LOOPS:
            p = [INTER["t"][TAILT[w]]]
            while True:
                if INTER["h"][TAILT][w] == INTER["t"][TAILT][TLINK[w]]:
                    po = [INTER["h"][TAILT][w]]
                    po.extend(p)
                    p = po
                    w = TLINK[w]
                    if INTER["h"][TAILT][w] != p[len(p) - 1]:
                        continue
                po = [INTER["h"][TAILT][w]]
                po.extend(p)
                p = po
                break
            polyGones.append(p)
        return polyGones

    def joinSegments(self, polyLines):
        """Merge open poly-lines end-to-end until no merge applies;
        closed lines (first == last node, duplicate kept) pass through.
        Consumes polyLines (pops in place)."""
        polyGones = []
        maxbar = max(len(polyLines), 1)
        pbar = ProgressBar(maxval=maxbar).start()
        while polyLines != []:
            # ~~> starting point
            e = polyLines[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(
                    e[0:len(e)])  # /!\ here you keep the duplicated point
                polyLines.pop(0)
                continue
            # ~~> iterative process
            for ei, iline in zip(polyLines[1:], range(len(polyLines))[1:]):
                # ~~> merging the two segments
                if b == ei[0]:
                    polyLines[0] = e[0:len(e)]  # copy !
                    polyLines[0].extend(ei[1:])
                    polyLines.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    polyLines[0] = ei[0:len(ei)]  # copy !
                    polyLines[0].extend(e[1:])
                    polyLines.pop(iline)
                    break
            # ~~> completed search
            if le == len(polyLines[0]):
                polyGones.append(e[0:len(e)])
                polyLines.pop(0)
            pbar.update(maxbar - len(polyLines))
        pbar.finish()
        return polyGones

    def tetrisOddSegments(self, main, odds):
        """Stitch the 'odd' external segments onto the main poly-lines
        (same merge logic as joinSegments but pulling from odds), then
        drop immediate back-tracks (p[j-2] == p[j]) from the results."""
        polyGones = []
        lo = len(odds)
        while main != []:
            # ~~> starting point
            e = main[0]
            le = len(e)
            a, b = e[0], e[len(e) - 1]
            # ~~> case of closed line
            if a == b:
                polyGones.append(
                    e[0:len(e)])  # /!\ here you keep the duplicated point
                main.pop(0)
                continue
            # ~~> iterative process
            for ei, iline in zip(odds, range(len(odds))):
                # ~~> merging the two segments
                if b == ei[0]:
                    main[0] = e[0:len(e)]
                    main[0].extend(ei[1:])
                    odds.pop(iline)
                    break
                if a == ei[len(ei) - 1]:
                    main[0] = ei[0:len(ei)]
                    main[0].extend(e[1:])
                    odds.pop(iline)
                    break
            # ~~> completed search
            if le == len(main[0]):
                polyGones.append(e[0:len(e)])
                main.pop(0)
        # ~~> removing the over-constrained elements
        for p in polyGones:
            if len(p) > 3:
                j = 2
                while j < len(p):
                    if p[j - 2] == p[j]:
                        p.pop(j - 2)
                        p.pop(j - 2)
                    j += 1
        return polyGones

    # Filter poly according to IPOBO on that part.
    # ~> gloseg: is the ensemble of either closed islands or
    #    open external boundary segments
    # Note: filtering now seems to mean that to have done a lot of work for nothing
    def globalSegments(self, poly):
        """Cut each poly-line into runs of nodes that are on the global
        external boundary (IPOBO != 0); for a closed contour the trailing
        run is merged with the first so the seam does not split it."""
        gloseg = []
        for p in poly:
            pA = p[0]
            pZ = p[len(p) - 1]
            closed = False
            if pA == pZ and self.IPOBO[pA] != 0:
                closed = True
            iA = 0
            iZ = 0
            ploseg = []
            for i in p:
                if self.IPOBO[
                        i] != 0:  # moves the counter along for external points
                    iZ += 1
                elif iZ != 0:  # you have just found the end of an external segment
                    ploseg.append(p[iA:iA + iZ])
                    iA += iZ + 1
                    iZ = 0
                else:
                    iA += 1
            if iZ != 0:
                if closed and len(ploseg) > 0:
                    i = p[iA:iA + iZ]
                    i.extend(ploseg[0][1:])  # remove duplicate
                    ploseg[0] = i
                else:
                    ploseg.append(p[iA:iA + iZ])
            gloseg.extend(ploseg)
        return gloseg

    def putContent(self):
        """Drive the whole split: join boundary segments per part, write
        optional i2s/SELAFIN diagnostics, renumber and write one SELAFIN
        per part, then (if requested) build IFAPAR/NPTIR and write one
        CONLIM per part. Heavy side effects: writes files, mutates
        self.NSPLIT/self.KSPLIT/self.clm."""
        # ~~> Extension for parallel file names
        fmtn = "00000" + str(self.NPARTS - 1)
        fmtn = fmtn[len(fmtn) - 5:]

        print "\n... Split the boundary connectivity"
        # ~~> Assemble internal and external segments
        polyCLOSED = dict([(i, []) for i in range(self.NPARTS)])
        polyFILTER = dict([(i, []) for i in range(self.NPARTS)])
        polyGLOSED = []
        for part in range(self.NPARTS):  # this could be done in parallel
            print " +> Joining up boundary segments for part: ", part + 1
            # ~~> Joining up boundaries for sub-domains
            print " ~> main internal segments"
            self.PINTER[part] = self.joinPairs(self.PINTER[part])
            print " ~> main external segments"
            polyHALO = self.joinPairs(self.PNHALO[part])
            polyHALO.extend(self.PINTER[part])
            polyHALO = self.joinSegments(polyHALO)
            print " ~> odd segments"
            polyODDS = self.joinSegments(self.PNODDS[part])
            print " ~> stitching with the odd ones"
            polyGones = self.tetrisOddSegments(polyHALO, polyODDS)
            print " ~> final closure"
            polyCLOSED[part] = self.joinSegments(polyGones)
            # ~~> Building up the entire picture
            polyFILTER[part] = self.globalSegments(polyCLOSED[part])
            polyGLOSED.extend(polyFILTER[part])
        # ~~> Joining up boundaries for the global domain (Note: seems counter productive but is not)
        polyGLOSED = self.joinSegments(polyGLOSED)

        if self.isDOMAIN != "":
            print "\n... Printing the domain split into a series of i2s files"
            # ~~> Convert node numbers into x,y
            for part in range(self.NPARTS):
                print " +> part ", part + 1, " of ", self.NPARTS
                polyXY = []
                for pg in range(len(polyCLOSED[part])):
                    pxy = []
                    for pt in range(len(polyCLOSED[part][pg])):
                        n = polyCLOSED[part][pg][pt]
                        pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]])
                    polyXY.append(pxy)
                # ~~> Write polygons to double check
                fmti = "00000" + str(part)
                fmti = fmti[len(fmti) - 5:]
                fileName = path.join(
                    path.dirname(self.slf.file["name"]),
                    self.isDOMAIN + fmtn + "-" + fmti + ".i2s")
                putInS(fileName, [], "i2s", polyXY)
            # ~~> Convert node numbers into x,y
            polyXY = []
            for pg in range(len(polyGLOSED)):
                pxy = []
                for pt in range(len(polyGLOSED[pg])):
                    n = polyGLOSED[pg][pt]
                    pxy.append([self.slf.MESHX[n], self.slf.MESHY[n]])
                polyXY.append(pxy)
            # ~~> Write polygons to double check
            fileName = path.join(path.dirname(self.slf.file["name"]),
                                 self.isDOMAIN + ".i2s")
            putInS(fileName, [], "i2s", polyXY)

        print "\n... Final check to the element partitioning"
        for part in range(self.NPARTS):  # this could be done in parallel
            self.KSPLIT = self.resetPartition(part, self.PINTER[part],
                                              self.KSPLIT)

        if self.isDOMAIN != "":  # ~~> This is optional
            print "\n... Printing the domain split into a SELAFIN"
            fileRoot, fileExts = path.splitext(self.slf.file["name"])
            self.slf.fole.update(
                {"hook": open(fileRoot + "_PROCS" + fileExts, "wb")})
            self.slf.appendHeaderSLF()
            self.slf.appendCoreTimeSLF(0)
            VARSOR = self.slf.getVALUES(0)
            for v in range(self.slf.NVAR):
                VARSOR[v] = self.NSPLIT  # every variable shows the part number
            self.slf.appendCoreVarsSLF(VARSOR)
            self.slf.fole["hook"].close()

        print "\n... Storing the global liquid boundary numbering (NUMLIQ)"
        # ~~> Implying NUMLIQ and the number NFRLIQ based on the joined-up lines
        self.clm.setNUMLIQ(polyGLOSED)

        print "\n... Split the mesh connectivity"
        # ~~> Preliminary set up for LIKLE, KNOLG and KEMLG by parts
        LIKLE = dict([(i, []) for i in range(self.NPARTS)])
        KELLG = dict([(i, []) for i in range(self.NPARTS)])
        KNOLG = dict([(i, []) for i in range(self.NPARTS)])
        for part in range(self.NPARTS):
            print " +> re-ordering IKLE for part ", part + 1
            LIKLE[part], KELLG[part], KNOLG[part] = self.getIKLE(part)

        # ~~> CONLIM file: Preliminary set up of IFAPAR and ISEG for all parts
        IFAPAR = dict([(i, {}) for i in range(self.NPARTS)])
        ISEG = {}
        # Organising ISEG for easier call: part 1
        for part in range(self.NPARTS):
            for i in polyFILTER[part]:
                if i[0] == i[len(i) - 1]:
                    continue
                # /!\ you are here adding one !
                if i[0] in ISEG:
                    ISEG[i[0]].update({part: i[1] + 1})
                else:
                    ISEG.update({i[0]: {part: i[1] + 1}})
                if i[len(i) - 1] in ISEG:
                    ISEG[i[len(i) - 1]].update({part: -i[len(i) - 2] - 1})
                else:
                    ISEG.update({i[len(i) - 1]: {part: -i[len(i) - 2] - 1}})
        # Switching parts of ISEG for final call: part 2
        for i in ISEG:
            if len(ISEG[i]) != 2:
                print "... You have a boundary node surounded with more than two boundary segments: ", i
                sys.exit(1)
            parts = ISEG[i].keys()
            # swap the two parts' neighbour values
            ISEG[i] = {
                parts[0]: ISEG[i][parts[1]],
                parts[1]: ISEG[i][parts[0]]
            }
        # ~~> CONLIM file: Preliminary set up of NPTIR for all parts
        NPTIR = dict([(i, {}) for i in range(self.NPARTS)])
        for part in range(self.NPARTS):
            for p in self.PINTER[part]:
                NPTIR[part].update(dict([(i, []) for i in p]))
        parts = range(self.NPARTS)
        while parts != []:
            part = parts[0]
            parts.pop(0)
            for ip in NPTIR[part]:
                for ipart in parts:
                    if ip in NPTIR[ipart]:
                        # shared interface node: cross-register both parts
                        NPTIR[part][ip].append(ipart)
                        NPTIR[ipart][ip].append(part)

        print "... Split of the SELAFIN file"
        for part in range(self.NPARTS):
            fmti = "00000" + str(part)
            fmti = fmti[len(fmti) - 5:]
            print " +> part ", part + 1, " of ", self.NPARTS
            self.slfn.IKLE2 = LIKLE[part]
            self.slfn.NELEM2 = len(LIKLE[part])
            self.slfn.NPOIN2 = len(KNOLG[part])
            # ~~> IPARAM has two new values: 8:NPTFR and 9:NPTIR
            self.slfn.IPARAM[7] = len(
                np.unique(np.concatenate(polyFILTER[part])))
            self.slfn.IPARAM[8] = len(NPTIR[part])
            # ~~> IPOBO (or IRAND) converted into KNOLG[part]
            self.slfn.IPOBO = KNOLG[part] + 1
            print " ~> filtering the MESH"
            # ~~> GEO file: MESH coordinates
            self.slfn.MESHX = np.zeros(self.slfn.NPOIN2, dtype=np.float32)
            self.slfn.MESHY = np.zeros(self.slfn.NPOIN2, dtype=np.float32)
            self.slfn.MESHX = self.slf.MESHX[KNOLG[part]]
            self.slfn.MESHY = self.slf.MESHY[KNOLG[part]]
            # ~~> GEO file: File names
            fileRoot, fileExts = path.splitext(self.slf.file["name"])
            self.slfn.file["name"] = fileRoot + fmtn + "-" + fmti + fileExts
            # ~~> GEO file: Printing
            print " ~> printing: ", self.slfn.file["name"]
            self.slfn.fole.update({"hook": open(self.slfn.file["name"], "wb")})
            self.slfn.appendHeaderSLF()
            LVARSOR = np.zeros((self.slfn.NVAR, self.slfn.NPOIN2),
                               dtype=np.float32)
            for t in range(len(self.slf.tags["times"])):
                self.slfn.appendCoreTimeSLF(t)
                VARSOR = self.slf.getVALUES(t)
                for v in range(self.slfn.NVAR):
                    LVARSOR[v] = VARSOR[v][KNOLG[part]]
                self.slfn.appendCoreVarsSLF(LVARSOR)
            self.slfn.fole["hook"].close()

        if not self.isCONLIM:
            return

        print "\n... Connect elements across internal boundaries (IFAPAR)"
        for part in range(self.NPARTS):
            print " +> part ", part + 1, " of ", self.NPARTS
            # ~~> CONLIM file: Preliminary set up of PEHALO elements accross internal boundaries
            PEHALO = {}
            SEHALO = {}
            # Step 1: find out about the primary elements and loop through IKLE
            self.NSPLIT *= 0
            MASKER = NPTIR[part].keys()
            self.NSPLIT[MASKER] += 1
            print " ~> Assembling primary elements with other side"
            # Sub Step 1: Assembling all edges from the other sides
            maxbar = 0
            ibar = 0
            for ip in range(self.NPARTS):
                maxbar += len(LIKLE[ip])
            pbar = ProgressBar(maxval=maxbar).start()
            for otherpart in range(self.NPARTS):
                if otherpart == part:
                    continue  # all parts are still positive at this stage
                for k in range(len(LIKLE[otherpart])):
                    ibar += 1
                    e = self.slf.IKLE[KELLG[otherpart][k]]
                    if np.count_nonzero(self.NSPLIT[e]) < 2:
                        continue
                    for p1, p2 in zip([1, 2, 0], [
                            0, 1, 2
                    ]):  # reverse order because looking from the other side
                        if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                            if not (e[p1], e[p2]) in PEHALO:
                                PEHALO.update({(e[p1], e[p2]): [0, []]})
                            PEHALO[(e[p1], e[p2])][1].append(k)
                            PEHALO[(e[p1], e[p2])][1].append(otherpart)
                    pbar.update(ibar)
            # Sub Step 2: Assembling all edges from the primary side (there are three times more of them)
            for k in range(len(LIKLE[part])):
                ibar += 1
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if np.count_nonzero(self.NSPLIT[e]) < 2:
                    continue
                for p1, p2, p3 in zip([0, 1, 2], [1, 2, 0], [2, 0, 1]):
                    if self.NSPLIT[e[p1]] > 0 and self.NSPLIT[e[p2]] > 0:
                        if (e[p1], e[p2]
                            ) in PEHALO:  # the good side opposes the dark side
                            PEHALO[(e[p1], e[p2])][0] = k
                            if self.NSPLIT[e[p3]] == 0:
                                self.NSPLIT[e[p3]] = -1
                            if self.NSPLIT[e[p3]] == -1:
                                if not (e[p1], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p1], e[p3]): []})
                                SEHALO[(e[p1], e[p3])].append(k)
                                if not (e[p2], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p2], e[p3]): []})
                                SEHALO[(e[p2], e[p3])].append(k)
                            else:  # self.NSPLIT[e[p3]] must be 2 !
                                if not (e[p3], e[p1]) in SEHALO:
                                    SEHALO.update({(e[p3], e[p1]): []})
                                if k not in SEHALO[(e[p3], e[p1])]:
                                    SEHALO[(e[p3], e[p1])].append(k)
                                if not (e[p2], e[p3]) in SEHALO:
                                    SEHALO.update({(e[p2], e[p3]): []})
                                if k not in SEHALO[(e[p2], e[p3])]:
                                    SEHALO[(e[p2], e[p3])].append(k)
                            if self.KSPLIT[j] >= 0:
                                self.KSPLIT[j] = -(
                                    self.KSPLIT[j] + 1
                                )  # /!\ This is very dangerous but necessary
                pbar.update(ibar)
            pbar.finish()
            # Sub Step 3: Final clean up of the other side ? no need but check later for (ei)[0] == 0
            # Step 2: find out about the secondary elements on IKLE ( local LIKLE ? )
            print " ~> Assembling secondary elements of that side"
            pbar = ProgressBar(maxval=len(LIKLE[part])).start()
            for k in range(len(LIKLE[part])):
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                if self.KSPLIT[j] != part:
                    continue
                if np.count_nonzero(self.NSPLIT[e]) < 2:
                    continue
                for i in [0, 1, 2]:
                    ii = (i + 1) % 3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (
                            e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (
                            e[ii], e[i]) in SEHALO:
                        SEHALO[(e[ii], e[i])].append(k)  # opposite orientation
                    ii = (i + 2) % 3
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] < 0 and (
                            e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # correct orientation
                    if self.NSPLIT[e[i]] > 0 and self.NSPLIT[e[ii]] > 0 and (
                            e[i], e[ii]) in SEHALO:
                        SEHALO[(e[i], e[ii])].append(k)  # opposite orientation
                if self.KSPLIT[j] < 0:
                    self.KSPLIT[
                        j] = -self.KSPLIT[j] - 1  # /!\ back to a safe place
                pbar.update(k)
            pbar.finish()
            # Step 3: finally cross reference information between SEHALO and PEHALO
            print " ~> Combining sides surrounding the halo-elements"
            for ie in PEHALO:
                if PEHALO[ie][0] == 0:
                    continue
                k = PEHALO[ie][0]  # element number in its local part numbering
                if not k in IFAPAR[part]:
                    IFAPAR[part].update({k: [-2, -1, -2, -1, -2, -1]})
                j = KELLG[part][k]
                e = self.slf.IKLE[j]
                for p1, p2 in zip([0, 1, 2], [1, 2, 0]):
                    if (e[p1], e[p2]) in SEHALO:
                        if len(SEHALO[(e[p1], e[p2])]) > 1:
                            if SEHALO[(e[p1], e[p2])][0] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p1],
                                                                  e[p2])][1]
                            if SEHALO[(e[p1], e[p2])][1] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p1],
                                                                  e[p2])][0]
                            IFAPAR[part][k][1 + 2 * p1] = part
                    if (e[p2], e[p1]) in SEHALO:
                        if len(SEHALO[(e[p2], e[p1])]) > 1:
                            if SEHALO[(e[p2], e[p1])][0] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p2],
                                                                  e[p1])][1]
                            if SEHALO[(e[p2], e[p1])][1] == k:
                                IFAPAR[part][k][2 * p1] = SEHALO[(e[p2],
                                                                  e[p1])][0]
                            IFAPAR[part][k][1 + 2 * p1] = part
                    if ie == (e[p1], e[p2]):
                        IFAPAR[part][k][2 * p1] = PEHALO[ie][1][0]
                        IFAPAR[part][k][1 + 2 * p1] = PEHALO[ie][1][1]

        # ~~> CONLIM file: Write to file ... pfuuuuuh ... this is it !
        print "\n... Split of the CONLIM files"
        for part in range(self.NPARTS):
            fmti = "00000" + str(part)
            fmti = fmti[len(fmti) - 5:]
            print " +> part: ", part + 1, " of ", self.NPARTS
            # ~~> CONLIM file: Set the filter
            INDEX = np.zeros_like(self.clm.INDEX, dtype=np.int)
            for contour in polyFILTER[part]:
                # ~~> Closed contour: no need to change ISEG
                if contour[0] == contour[len(contour) - 1]:
                    for c in contour[1:]:
                        INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1
                # ~~> Open contour: need to change ISEG with neighbours
                else:
                    for c in contour[0:]:
                        INDEX[self.clm.KFRGL[c]] = self.clm.KFRGL[c] + 1
                    iA = self.clm.KFRGL[contour[0]]
                    self.clm.POR["is"][iA] = ISEG[contour[0]][part]
                    self.clm.POR["xs"][iA] = self.slf.MESHX[abs(
                        ISEG[contour[0]][part]) - 1]  # /!\ MESHX start at 0
                    self.clm.POR["ys"][iA] = self.slf.MESHY[abs(
                        ISEG[contour[0]][part]) - 1]  # /!\ MESHY start at 0
                    iA = self.clm.KFRGL[contour[len(contour) - 1]]
                    self.clm.POR["is"][iA] = ISEG[contour[len(contour) - 1]][part]
                    self.clm.POR["xs"][iA] = self.slf.MESHX[
                        abs(ISEG[contour[len(contour) - 1]][part]) - 1]
                    self.clm.POR["ys"][iA] = self.slf.MESHY[
                        abs(ISEG[contour[len(contour) - 1]][part]) - 1]
            self.clm.INDEX = INDEX
            # ~~> CONLIM file: Set the NPTIR and CUTs
            self.clm.NPTIR = NPTIR[part]
            # ~~> CONLIM file: Set the IFAPAR
            self.clm.IFAPAR = IFAPAR[part]
            # ~~> CONLIM file
            fileRoot, fileExts = path.splitext(self.clm.fileName)
            print " ~> printing: ", fileRoot + fmtn + "-" + fmti + fileExts
            self.clm.putContent(fileRoot + fmtn + "-" + fmti + fileExts)
        return
class Dumper2D(Caster): def __init__(self, caster, dump): Caster.__init__(self, { 'object': caster.object, 'obdata': caster.obdata }) self.obtype = dump['saveas'] # the type of file, 'slf' most probably self.oudata = None # the loaded SELAFIN object itself, most probably #self.obdump = dumpSELAFIN() def add(self, typl, what): Caster.add(self, typl, what) # ~~> output from for 2D file if self.obtype == 'slf': #self.obdump.add(self.object[what['file']]) cast = self.get(typl, what) support = cast.support values = cast.values if len(support) != 3: print '... not enough information to save as 2d variable' sys.exit(1) obj = self.object[what['file']] # ~~ SELAFIN header ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ if not self.oudata: self.oudata = SELAFIN('') # create the out header self.oudata.TITLE = '' # TODO: pass it on from what and deco self.oudata.NBV1 = 0 self.oudata.VARNAMES = [] self.oudata.VARUNITS = [] self.oudata.IPARAM = obj.IPARAM self.oudata.IPARAM[6] = 1 # 3D being forced to 2D self.oudata.NDP2 = len(support[2][0]) if np.all([obj.IKLE2, support[2]]): self.oudata.IKLE2 = support[3] self.oudata.IPOB2 = np.zeros(len(supoort[0]), dtype=np.int) self.oudata.MESHX = support[0] self.oudata.MESHY = support[1] else: self.oudata.IKLE2 = obj.IKLE2 self.oudata.IPOB2 = obj.IPOB2 # IPOBO missing from support self.oudata.MESHX = obj.MESHX self.oudata.MESHY = obj.MESHY self.oudata.NELEM2 = len(self.oudata.IKLE2) self.oudata.NPOIN2 = len(self.oudata.MESHX) self.oudata.NELEM3 = self.oudata.NELEM2 self.oudata.NPOIN3 = self.oudata.NPOIN2 self.oudata.NDP3 = self.oudata.NDP2 self.oudata.NPLAN = 1 vars, vtypes = whatVarsSLF(what['vars'], obj.VARNAMES) self.oudata.NBV1 = self.oudata.NBV1 + len(vars[0]) self.oudata.NBV2 = 0 self.oudata.NVAR = self.oudata.NBV1 + self.oudata.NBV2 self.oudata.CLDNAMES = [] self.oudata.CLDUNITS = [] self.oudata.VARINDEX = range(self.oudata.NVAR) for ivar, ival in zip(vars[0], range(len(vars[0]))): self.oudata.VARNAMES.append(obj.VARNAMES[ivar]) 
self.oudata.VARUNITS.append(obj.VARUNITS[ivar]) self.obdata.update({obj.VARNAMES[ivar]: [values[ival]]}) if max(self.oudata.IPARAM[9], obj.IPARAM[9]) > 0: if self.oudata.DATETIME != obj.DATETIME: self.oudata.IPARAM[9] = 0 if self.oudata.NELEM2 != obj.NELEM2 or self.oudata.NPOIN2 != obj.NPOIN2: print '... mismatch between the 2D sizes of layers of a same save2d object ' sys.exit(1) self.oudata.IKLE3 = self.oudata.IKLE2 self.oudata.IPOB3 = self.oudata.IPOB2 # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1) def save(self, fileName): # gather common information for the final header if self.obtype == 'slf': self.oudata.fole = {} self.oudata.fole.update({'name': fileName}) self.oudata.fole.update( {'endian': ">"}) # "<" means little-endian, ">" means big-endian self.oudata.fole.update({'float': ('f', 4)}) #'f' size 4, 'd' = size 8 self.oudata.fole.update({'hook': open(fileName, 'wb')}) self.oudata.appendHeaderSLF() self.oudata.appendCoreTimeSLF(0.0) # TODO: recover track of time for ivar in self.oudata.VARNAMES: self.oudata.appendCoreVarsSLF(self.obdata[ivar]) self.oudata.fole['hook'].close() # ~~> unkonwn else: # TODO: raise exception print '... do not know how to write to this format: ' + self.obtype sys.exit(1)
slf2d.tags['cores'].append(0) slf2d.tags['times'].append(0) #slf2d.tags = { 'times':[0] } # time (sec) #slf2d.DATETIME = sel.DATETIME slf2d.DATETIME = [2015, 1, 1, 1, 1, 1] #slf2d.tags = { 'cores':[long(0)] } # time frame #print ' +> Write SELAFIN headers' slf2d.fole.update({ 'hook': open(slf_file,'w') }) slf2d.fole.update({ 'name': 'Converted from gmsh by pputils' }) slf2d.fole.update({ 'endian': ">" }) # big endian slf2d.fole.update({ 'float': ('f',4) }) # single precision slf2d.appendHeaderSLF() slf2d.appendCoreTimeSLF(0) slf2d.appendCoreVarsSLF([z]) # to write the *.cli file # to create the *.cli output file cli_file = slf_file[0:len(slf_file)-4] + str(".cli") fout = open(cli_file,"w") for i in range(len(b)): fout.write(str('2 2 2 0.000 0.000 0.000 0.000 2 0.000 0.000 0.000 ') + str(slf2d.IPOB2[i]+1) + " " + str(i+1) + " " + "\n")