def _isMine(path):
    # File-type sniff: True when the leading Fortran record sizes match
    # the expected header sequence for this reader (80, then 16, then 28
    # bytes).  Any other sequence means the file belongs to another reader.
    # NOTE(review): block nesting reconstructed from a collapsed source
    # line; each size check is assumed to guard the following next() call
    # -- confirm against the original formatting.
    testf = OpenRecordFile(path)
    if testf.record_size == 80:
        testf.next()
        if testf.record_size == 16:
            testf.next()
            if testf.record_size == 28:
                return True
    return False
def _isMine(path):
    # Duplicate of the sniffer above (the file defines it twice): True
    # when the leading Fortran record sizes are 80, 16, then 28 bytes.
    # NOTE(review): block nesting reconstructed from a collapsed source
    # line; each size check is assumed to guard the following next() call
    # -- confirm against the original formatting.
    testf = OpenRecordFile(path)
    if testf.record_size == 80:
        testf.next()
        if testf.record_size == 16:
            testf.next()
            if testf.record_size == 28:
                return True
    return False
def __init__(self, rffile, rows, cols):
    """
    Scan a CAMx wind-style record file to learn its layout (time header
    size, layer count, time step count), then expose it through
    memory-mapped PseudoNetCDF variables.

    rffile - path to the binary Fortran record file
    rows   - number of grid rows
    cols   - number of grid columns
    """
    rf = OpenRecordFile(rffile)
    # First record is the time header: 12 bytes -> "fii"
    # (time, date, lstagger); 8 bytes -> "fi" (time, date).
    self.__time_hdr_fmts = {12: "fii", 8: "fi"}[rf.record_size]
    self.__time_hdr_fmts_size = rf.record_size
    self.STIME, self.SDATE = rf.unpack("fi")
    rf.next()
    # Count consecutive equal-sized data records to infer the layer
    # count; each layer contributes two records, hence the halving.
    lays = 1
    record_size = rf.record_size
    while rf.record_size == record_size:
        lays += 1
        rf.next()
    # Size of the terminating record in 4-byte words, including the two
    # 4-byte Fortran record markers.  Bug fix: true division produced a
    # float under Python 3; this is a word count, so use //.
    self.__dummy_length = (rf.record_size + 8) // 4
    lays //= 2
    # One data record: rows*cols 4-byte floats plus record markers
    record = rows * cols * 4 + 8
    total_size = self.__dummy_length
    times = 0
    while total_size < rf.length:
        times += 1
        total_size += record * 2 * lays + self.__time_hdr_fmts_size + 8
    times -= 1
    # Bug fix: the original assigned the OrderedDict *class*; an
    # instance is intended (placeholder until replaced below).
    self.variables = OrderedDict()
    del rf
    self.createDimension('TSTEP', times)
    self.createDimension('DATE-TIME', 2)
    self.createDimension('LAY', lays)
    self.createDimension('ROW', rows)
    self.createDimension('COL', cols)
    self.createDimension('VAR', 2)
    self.NVARS = len(self.dimensions['VAR'])
    self.NLAYS = len(self.dimensions['LAY'])
    self.NROWS = len(self.dimensions['ROW'])
    self.NCOLS = len(self.dimensions['COL'])
    self.FTYPE = 1
    # Big-endian float view over the entire file
    self.__memmap = memmap(rffile, '>f', 'r', offset=0)
    if self.__time_hdr_fmts_size == 12:
        self.LSTAGGER = self.__memmap[3].view('i')
    else:
        self.LSTAGGER = nan
    self.variables = PseudoNetCDFVariables(self.__variables,
                                           ['TFLAG', 'U', 'V'])
def __init__(self, rffile, rows, cols):
    """
    Scan a CAMx wind-style record file to learn its layout (time header
    size, layer count, time step count), then expose it through
    memory-mapped PseudoNetCDF variables.

    rffile - path to the binary Fortran record file
    rows   - number of grid rows
    cols   - number of grid columns
    """
    rf = OpenRecordFile(rffile)
    # First record is the time header: 12 bytes -> "fii"
    # (time, date, lstagger); 8 bytes -> "fi" (time, date).
    self.__time_hdr_fmts = {12: "fii", 8: "fi"}[rf.record_size]
    self.__time_hdr_fmts_size = rf.record_size
    self.STIME, self.SDATE = rf.unpack("fi")
    rf.next()
    # Count consecutive equal-sized data records to infer the layer
    # count; each layer contributes two records, hence the halving.
    lays = 1
    record_size = rf.record_size
    while rf.record_size == record_size:
        lays += 1
        rf.next()
    # Size of the terminating record in 4-byte words, including the two
    # 4-byte Fortran record markers.  Bug fix: true division produced a
    # float under Python 3; this is a word count, so use //.
    self.__dummy_length = (rf.record_size + 8) // 4
    lays //= 2
    # One data record: rows*cols 4-byte floats plus record markers
    record = rows * cols * 4 + 8
    total_size = self.__dummy_length
    times = 0
    while total_size < rf.length:
        times += 1
        total_size += record * 2 * lays + self.__time_hdr_fmts_size + 8
    times -= 1
    # Bug fix: the original assigned the OrderedDict *class*; an
    # instance is intended (placeholder until replaced below).
    self.variables = OrderedDict()
    del rf
    self.createDimension('TSTEP', times)
    self.createDimension('DATE-TIME', 2)
    self.createDimension('LAY', lays)
    self.createDimension('ROW', rows)
    self.createDimension('COL', cols)
    self.createDimension('VAR', 2)
    self.NVARS = len(self.dimensions['VAR'])
    self.NLAYS = len(self.dimensions['LAY'])
    self.NROWS = len(self.dimensions['ROW'])
    self.NCOLS = len(self.dimensions['COL'])
    self.FTYPE = 1
    # Big-endian float view over the entire file
    self.__memmap = memmap(rffile, '>f', 'r', offset=0)
    if self.__time_hdr_fmts_size == 12:
        self.LSTAGGER = self.__memmap[3].view('i')
    else:
        self.LSTAGGER = nan
    self.variables = PseudoNetCDFVariables(self.__variables,
                                           ['TFLAG', 'U', 'V'])
class point_source(PseudoNetCDFFile):
    """
    point_source provides a PseudoNetCDF interface for CAMx point
    source files.  Where possible, the interface follows IOAPI
    conventions (see www.baronams.com).

    Time-invariant stack properties (XSTK, YSTK, HSTK, DSTK, TSTK,
    VSTK) are dimensioned ('STK',); per-hour stack properties (KCELL,
    FLOW, PLMHT) and species emissions are dimensioned
    ('TSTEP', 'STK').

    ex:
        >>> point_sourcefile = point_source('camx_point_source.bin')
        >>> v = point_sourcefile.variables['XSTK']
        >>> v.dimensions
        ('STK',)
        >>> v = point_sourcefile.variables['NO2']
        >>> v.dimensions
        ('TSTEP', 'STK')
    """

    # Fortran record formats (struct module syntax); the "ii" prefixes
    # below account for the 4-byte record markers bracketing each record.
    emiss_hdr_fmt = "10i60i3ifif"
    grid_hdr_fmt = "ffiffffiiiiifff"
    cell_hdr_fmt = "iiii"
    time_hdr_fmt = "ifif"
    spc_fmt = "10i"
    nstk_hdr_fmt = "ii"
    padded_nstk_hdr_size = struct.calcsize("ii" + nstk_hdr_fmt)
    padded_time_hdr_size = struct.calcsize("ii" + time_hdr_fmt)
    stk_hdr_fmt = "ffffff"
    id_fmt = "i" + spc_fmt
    id_size = struct.calcsize(id_fmt)
    data_fmt = "f"
    stkprops = ['XSTK', 'YSTK', 'HSTK', 'DSTK', 'TSTK', 'VSTK']
    stktimeprops = ['KCELL', 'FLOW', 'PLMHT']

    def __init__(self, rf):
        """
        rf - path to a CAMx point source file.

        Reads the header and first records to learn the file layout;
        see __readheader and __gettimestep for details.
        """
        self.rffile = OpenRecordFile(rf)
        self.padded_time_hdr_size = struct.calcsize(self.time_hdr_fmt + "ii")
        self.__readheader()
        self.__gettimestep()
        self.__gettimeprops()
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('STK', self.nstk)
        varkeys = [
            'XSTK', 'YSTK', 'HSTK', 'DSTK', 'TSTK', 'VSTK', 'KCELL',
            'FLOW', 'PLMHT'
        ] + [i.strip() for i in self.spcnames]
        self.variables = PseudoNetCDFVariables(self.__var_get, varkeys)

    def __var_get(self, key):
        """Construct, cache, and decorate the variable named key."""
        constr = self.__variables

        def decor(*args, **kwds):
            return {'notread': 1}

        values = constr(key)
        if key in self.stkprops:
            var = self.createVariable(key, 'f', ('STK', ))
        else:
            var = self.createVariable(key, 'f', ('TSTEP', 'STK'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __variables(self, k):
        """Return raw values for key k, dispatching on storage type."""
        if k in self.stkprops:
            return array(self.stk_props)[:, self.stkprops.index(k)]
        elif k in self.stktimeprops:
            # first two columns of each per-hour record are skipped
            return array(self.stk_time_props)[:, :, 2:][
                :, :, self.stktimeprops.index(k)]
        else:
            return self.getArray()[:, self.spcnames.index(k.ljust(10)), :]

    def header(self):
        """Return a nested-list representation of the file header."""
        rdum = 0.
        idum = 0
        ione = 1
        return [[
            self.name, self.note, ione, self.nspec, self.start_date,
            self.start_time, self.end_date, self.end_time
        ], [
            rdum, rdum, self.iutm, self.xorg, self.yorg, self.delx,
            self.dely, self.nx, self.ny, self.nz, idum, idum, rdum, rdum,
            rdum
        ], [ione, ione, self.nx, self.ny], self.spcnames,
                [ione, self.nstk], self.stk_props, self.stk_time_props]

    def __readheader(self):
        """
        Read the header section and initialize each header field
        (see CAMx Users Manual for a list) as a property of the class.
        """
        vals = self.rffile.read(self.emiss_hdr_fmt)
        (self.name, self.note, ione, self.nspec, self.start_date,
         self.start_time, self.end_date, self.end_time) = (
            vals[0:10], vals[10:70], vals[70], vals[71], vals[72],
            vals[73], vals[74], vals[75])
        (rdum, rdum, self.iutm, self.xorg, self.yorg, self.delx,
         self.dely, self.nx, self.ny, self.nz, idum, idum, rdum, rdum,
         rdum) = self.rffile.read(self.grid_hdr_fmt)
        if self.nz == 0:
            # Special case of gridded emissions; seems to be same as avrg
            self.nlayers = 1
        else:
            self.nlayers = self.nz
        ione, ione, nx, ny = self.rffile.read(self.cell_hdr_fmt)
        if not (self.nx, self.ny) == (nx, ny):
            raise ValueError(
                "nx, ny defined first as %i, %i and then as %i, %i" %
                (self.nx, self.ny, nx, ny))
        species_temp = self.rffile.read(self.nspec * self.spc_fmt)
        self.spcnames = []
        for i in range(0, self.nspec * 10, 10):
            self.spcnames.append(Int2Asc(species_temp[i:i + 10]))
        ione, self.nstk = self.rffile.read(self.nstk_hdr_fmt)
        stkprms = zeros((self.nstk * len(self.stk_hdr_fmt), ), 'f')
        read_into(self.rffile, stkprms, '')
        self.rffile.next()
        stkprms = stkprms.reshape((self.nstk, len(self.stk_hdr_fmt)))
        # Replace the sentinel missing value in the last column with NaN
        for i in range(stkprms.shape[0]):
            if stkprms[i, -1] == array_nan:
                stkprms[i, -1] = float('-nan')
        self.stk_props = stkprms.tolist()
        self.data_start_byte = self.rffile.record_start
        (self.start_date, self.start_time, end_date,
         end_time) = self.rffile.read(self.time_hdr_fmt)
        self.time_step = timediff((self.start_date, self.start_time),
                                  (end_date, end_time))
        # header end hour marks the start of the last step; extend it
        self.end_time += self.time_step
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time),
                     (2400, 24)[int(self.time_step % 2)]) / self.time_step)
        self.stk_time_prop_fmt = "" + ("iiiff" * self.nstk)
        self.padded_stk_time_prop_size = struct.calcsize(
            "ii" + self.stk_time_prop_fmt)
        self.record_fmt = ("i10i") + self.data_fmt * (self.nstk)
        self.record_size = struct.calcsize(self.record_fmt)
        self.padded_size = self.record_size + 8

    def __gettimestep(self):
        """
        Taken care of in __readheader: each hourly record provides its
        own start and end (t1 and t2), so nothing more to do here.
        """
        pass

    def __gettimeprops(self):
        """Read the per-hour stack property block for every time step."""
        self.stk_time_props = []
        eod = (2400, 24)[int(self.time_step % 2)]
        for ti, (d, t) in enumerate(
                timerange((self.start_date, self.start_time),
                          (self.end_date, self.end_time),
                          self.time_step, eod)):
            tmpprop = zeros((len(self.stk_time_prop_fmt)), 'f')
            tmpprop[...] = self.seekandread(d, t, 1, True,
                                            self.stk_time_prop_fmt)
            tmpprop = tmpprop.reshape(self.nstk, 5)
            for i in range(tmpprop.shape[0]):
                if tmpprop[i, -2] == array_nan:
                    tmpprop[i, -2] = float('-nan')
            self.stk_time_props.append(tmpprop.tolist())

    def __timerecords(self, dt):
        """
        Calculate the number of records to increment to reach
        time (d, t).
        """
        d, t = dt
        nsteps = timediff((self.start_date, self.start_time), (d, t),
                          (2400, 24)[int(self.time_step % 2)])
        nspec = self.__spcrecords(self.nspec + 1)
        return nsteps * (nspec)

    def __spcrecords(self, spc):
        """Calculated number of records before spc (1-based)."""
        return spc - 1

    def __recordposition(self, date, time, spc, offset=False):
        """
        Byte position of the record for (date, time, spc).

        date   - integer julian date
        time   - float hour
        spc    - integer species index (1-based)
        offset - when true, back up one stack-time-property block
        """
        ntime = self.__timerecords((date, time))
        # One header group (time + nstk + stack time properties)
        # precedes each hour of data.  Bug fix: wrap in int() -- true
        # division yielded a float byte offset under Python 3.
        nhdr = int(ntime / self.__spcrecords(self.nspec + 1)) + 1
        nspc = self.__spcrecords(spc)
        noffset = -abs(int(offset))
        byte = self.data_start_byte
        byte += nhdr * (self.padded_time_hdr_size +
                        self.padded_nstk_hdr_size +
                        self.padded_stk_time_prop_size)
        byte += (ntime + nspc) * self.padded_size
        byte += noffset * self.padded_stk_time_prop_size
        return byte

    def seek(self, date=None, time=None, spc=-1, offset=False):
        """
        Move the file cursor to the beginning of the specified record;
        see __recordposition for parameter definitions.  Range checking
        of date/time/spc is intentionally not performed here.
        """
        self.rffile._newrecord(
            self.__recordposition(date, time, spc, offset))

    def read(self, fmt=None):
        """Direct record file read; fmt defaults to a full data record."""
        if fmt is None:
            fmt = self.record_fmt
        return self.rffile.read(fmt)

    def read_into(self, dest):
        """
        Transfer values from the current record to dest.

        dest - numeric or numpy array
        """
        return read_into(self.rffile, dest, self.id_fmt, self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, spc=1):
        """See seek and read_into."""
        self.seek(date, time, spc)
        self.read_into(dest)

    def seekandread(self, date=None, time=None, spc=1, offset=False,
                    fmt=None):
        """See seek and read."""
        self.seek(date, time, spc, offset)
        return self.read(fmt)

    def values(self):
        for d, t, spc in self.__iter__():
            yield self.seekandread(d, t, spc)

    def items(self):
        for d, t, spc in self.__iter__():
            yield d, t, spc, self.seekandread(d, t, spc)

    def keys(self):
        for ti, (d, t) in enumerate(self.timerange()):
            for spc in range(1, len(self.spcnames) + 1):
                yield d, t, spc

    __iter__ = keys

    def getArray(self):
        """Return emissions as a (TSTEP, species, stack) float array."""
        a = zeros((self.time_step_count, self.nspec, self.nstk), 'f')
        for ti, (d, t) in enumerate(self.timerange()):
            for spc in range(1, len(self.spcnames) + 1):
                self.seekandreadinto(a[ti, spc - 1, ...], d, t, spc)
        return a.copy()

    def timerange(self):
        return timerange((self.start_date, self.start_time),
                         (self.end_date, self.end_time), self.time_step,
                         eod=24)
class height_pressure(PseudoNetCDFFile):
    """
    height_pressure provides a PseudoNetCDF interface for CAMx
    height/pressure files.  Where possible, the interface follows
    IOAPI conventions (see www.baronams.com).

    ex:
        >>> height_pressure_path = 'camx_height_pressure.bin'
        >>> rows, cols = 65, 83
        >>> hpf = height_pressure(height_pressure_path, rows, cols)
        >>> v = hpf.variables['HGHT']
        >>> v.dimensions
        ('TSTEP', 'LAY', 'ROW', 'COL')
        >>> v.shape
        (25, 28, 65, 83)
    """
    # record id: time (float), date (int)
    id_fmt = "fi"
    data_fmt = "f"

    def __init__(self, rf, rows, cols):
        """
        rf   - path to a CAMx height/pressure file
        rows - number of grid rows (None to infer)
        cols - number of grid columns (None to infer)
        """
        self.rffile = OpenRecordFile(rf)
        self.id_size = struct.calcsize(self.id_fmt)
        self.__readheader()
        self.__gettimestep()
        if rows is None and cols is None:
            rows = self.cell_count
            cols = 1
        elif rows is None:
            # integer division: cell counts are whole numbers (true
            # division produced float dimensions under Python 3)
            rows = self.cell_count // cols
        elif cols is None:
            cols = self.cell_count // rows
        else:
            if cols * rows != self.cell_count:
                raise ValueError(
                    "The product of cols (%d) and rows (%d) "
                    "must equal cells (%d)" %
                    (cols, rows, self.cell_count))
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('ROW', rows)
        self.createDimension('COL', cols)
        self.createDimension('LAY', self.nlayers)
        self.variables = PseudoNetCDFVariables(self.__var_get,
                                               ['HGHT', 'PRES'])

    def __var_get(self, key):
        """Construct, cache, and decorate the HGHT or PRES variable."""
        def constr(hp):
            return self.getArray({'HGHT': 0, 'PRES': 1}[hp])

        def decor(hp):
            return {
                'HGHT': dict(units='m', var_desc='HGHT'.ljust(16),
                             long_name='HGHT'.ljust(16)),
                'PRES': dict(units='hPa', var_desc='PRES'.ljust(16),
                             long_name='PRES'.ljust(16))
            }[hp]

        values = constr(key)
        var = self.createVariable(key, 'f',
                                  ('TSTEP', 'LAY', 'ROW', 'COL'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __readheader(self):
        """
        Read the first record to determine the record size and the
        number of cells per record.
        """
        self.data_start_byte = 0
        self.start_time, self.start_date = self.rffile.read(self.id_fmt)
        self.record_size = self.rffile.record_size
        self.padded_size = self.record_size + 8
        # Bug fix: must be integer division -- a float cell_count makes
        # the "data_fmt * cell_count" format string below raise a
        # TypeError under Python 3.
        self.cell_count = ((self.record_size - self.id_size) //
                           struct.calcsize(self.data_fmt))
        self.record_fmt = self.id_fmt + self.data_fmt * (self.cell_count)

    def __gettimestep(self):
        """
        Header information provides start and end date, but not the
        increment between; scan the first records to infer the layer
        count, time step length, and expected number of steps.
        """
        self.rffile._newrecord(self.padded_size * 2)
        d, t = self.start_date, self.start_time
        self.nlayers = 0
        while timediff((self.start_date, self.start_time), (d, t)) == 0:
            t, d = self.rffile.read(self.id_fmt)
            self.rffile.next()
            self.nlayers += 1
        self.time_step = timediff((self.start_date, self.start_time),
                                  (d, t))
        # Walk forward until a seek fails to locate the last time
        while True:
            try:
                self.seek(d, t, self.nlayers, 1, False)
                d, t = timeadd((d, t), (0, self.time_step))
            except Exception:
                break
        self.end_date, self.end_time = timeadd((d, t),
                                               (0, -self.time_step))
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time)) /
            self.time_step) + 1

    def __timerecords(self, dt):
        """Records to skip from the data start to reach time (d, t)."""
        d, t = dt
        nsteps = int(
            timediff((self.start_date, self.start_time),
                     (d, t)) / self.time_step)
        nk = self.__layerrecords(self.nlayers + 1)
        return nsteps * nk

    def __layerrecords(self, k):
        """Records to skip to reach layer k (two per layer: HGHT, PRES)."""
        return (k - 1) * 2

    def __recordposition(self, date, time, k, hp):
        """
        Byte position of the record for (date, time, layer k, hp),
        where hp selects height (0) or pressure (1).
        """
        ntime = self.__timerecords((date, time))
        nk = self.__layerrecords(k)
        return (nk + ntime + hp) * self.padded_size + self.data_start_byte

    def seek(self, date=None, time=None, k=1, hp=0, chkvar=True):
        """
        Move the file cursor to the specified record; with chkvar,
        validate that the request is within the file's range.
        """
        if date is None:
            date = self.start_date
        if time is None:
            time = self.start_time
        # Parentheses fix the original's and/or precedence so chkvar
        # actually disables all checks.
        # NOTE(review): messages below say "Vertical Diffusivity";
        # apparently inherited from a sibling reader -- confirm before
        # changing the user-facing text.
        if chkvar and (timediff((self.end_date, self.end_time),
                                (date, time)) > 0 or
                       timediff((self.start_date, self.start_time),
                                (date, time)) < 0):
            raise KeyError(
                "Vertical Diffusivity file includes (%i,%6.1f) thru "
                "(%i,%6.1f); you requested (%i,%6.1f)" %
                (self.start_date, self.start_time, self.end_date,
                 self.end_time, date, time))
        if chkvar and (k < 1 or k > self.nlayers):
            raise KeyError(
                "Vertical Diffusivity file include layers 1 thru %i; "
                "you requested %i" % (self.nlayers, k))
        if chkvar and (hp < 0 or hp > 1):
            raise KeyError(
                "Height pressure or indexed 0 and 1; you requested %i" %
                (hp))
        self.rffile._newrecord(self.__recordposition(date, time, k, hp))

    def read(self):
        """Call the record file read method directly."""
        return self.rffile.read(self.record_fmt)

    def read_into(self, dest):
        """
        Put values from the current record into dest.

        dest - numpy or numeric array
        """
        return read_into(self.rffile, dest, self.id_fmt, self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, k=1, hp=0):
        """See seek and read_into."""
        self.seek(date, time, k, hp)
        return self.read_into(dest)

    def seekandread(self, date=None, time=None, k=1, hp=0):
        """See seek and read."""
        self.seek(date, time, k, hp)
        return self.read()

    def values(self):
        for d, t, k in self.__iter__():
            yield (self.seekandread(d, t, k, 0),
                   self.seekandread(d, t, k, 1))

    def items(self):
        for d, t, k in self.__iter__():
            yield (d, t, k, self.seekandread(d, t, k, 0),
                   self.seekandread(d, t, k, 1))

    def keys(self):
        for d, t in self.timerange():
            for k in range(1, self.nlayers + 1):
                yield d, t, k

    __iter__ = keys

    def getArray(self, hp):
        """
        Return the full (TSTEP, LAY, ROW, COL) array for height (hp=0)
        or pressure (hp=1).
        """
        a = zeros((self.time_step_count, len(self.dimensions['LAY']),
                   len(self.dimensions['ROW']),
                   len(self.dimensions['COL'])), 'f')
        for ti, (d, t) in enumerate(self.timerange()):
            # Bug fixes: range (xrange is a NameError under Python 3)
            # and a single slice pair (numpy rejects an index with more
            # than one Ellipsis).
            for ki, k in enumerate(range(1, self.nlayers + 1)):
                self.seekandreadinto(a[ti, ki, :, :], d, t, k, hp)
        return a

    def timerange(self):
        eod = (2400, 24)[int(self.time_step % 2)]
        return timerange((self.start_date, self.start_time),
                         timeadd((self.end_date, self.end_time),
                                 (0, self.time_step), eod),
                         self.time_step, eod)
class point_source(PseudoNetCDFFile):
    """
    point_source provides a PseudoNetCDF interface for CAMx point
    source files.  Where possible, the interface follows IOAPI
    conventions (see www.baronams.com).

    Time-invariant stack properties (XSTK, YSTK, HSTK, DSTK, TSTK,
    VSTK) are dimensioned ('STK',); per-hour stack properties (KCELL,
    FLOW, PLMHT) and species emissions are dimensioned
    ('TSTEP', 'STK').

    ex:
        >>> point_sourcefile = point_source('camx_point_source.bin')
        >>> v = point_sourcefile.variables['XSTK']
        >>> v.dimensions
        ('STK',)
        >>> v = point_sourcefile.variables['NO2']
        >>> v.dimensions
        ('TSTEP', 'STK')
    """

    # Fortran record formats (struct module syntax); the "ii" prefixes
    # below account for the 4-byte record markers bracketing each record.
    emiss_hdr_fmt = "10i60i3ifif"
    grid_hdr_fmt = "ffiffffiiiiifff"
    cell_hdr_fmt = "iiii"
    time_hdr_fmt = "ifif"
    spc_fmt = "10i"
    nstk_hdr_fmt = "ii"
    padded_nstk_hdr_size = struct.calcsize("ii" + nstk_hdr_fmt)
    padded_time_hdr_size = struct.calcsize("ii" + time_hdr_fmt)
    stk_hdr_fmt = "ffffff"
    id_fmt = "i" + spc_fmt
    id_size = struct.calcsize(id_fmt)
    data_fmt = "f"
    stkprops = ['XSTK', 'YSTK', 'HSTK', 'DSTK', 'TSTK', 'VSTK']
    stktimeprops = ['KCELL', 'FLOW', 'PLMHT']

    def __init__(self, rf):
        """
        rf - path to a CAMx point source file.

        Reads the header and first records to learn the file layout;
        see __readheader and __gettimestep for details.
        """
        self.rffile = OpenRecordFile(rf)
        self.padded_time_hdr_size = struct.calcsize(self.time_hdr_fmt + "ii")
        self.__readheader()
        self.__gettimestep()
        self.__gettimeprops()
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('STK', self.nstk)
        varkeys = [
            'XSTK', 'YSTK', 'HSTK', 'DSTK', 'TSTK', 'VSTK', 'KCELL',
            'FLOW', 'PLMHT'
        ] + [i.strip() for i in self.spcnames]
        self.variables = PseudoNetCDFVariables(self.__var_get, varkeys)

    def __var_get(self, key):
        """Construct, cache, and decorate the variable named key."""
        constr = self.__variables

        def decor(*args, **kwds):
            return {'notread': 1}

        values = constr(key)
        if key in self.stkprops:
            var = self.createVariable(key, 'f', ('STK', ))
        else:
            var = self.createVariable(key, 'f', ('TSTEP', 'STK'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __variables(self, k):
        """Return raw values for key k, dispatching on storage type."""
        if k in self.stkprops:
            return array(self.stk_props)[:, self.stkprops.index(k)]
        elif k in self.stktimeprops:
            # first two columns of each per-hour record are skipped
            return array(self.stk_time_props)[:, :, 2:][
                :, :, self.stktimeprops.index(k)]
        else:
            return self.getArray()[:, self.spcnames.index(k.ljust(10)), :]

    def header(self):
        """Return a nested-list representation of the file header."""
        rdum = 0.
        idum = 0
        ione = 1
        return [[
            self.name, self.note, ione, self.nspec, self.start_date,
            self.start_time, self.end_date, self.end_time
        ], [
            rdum, rdum, self.iutm, self.xorg, self.yorg, self.delx,
            self.dely, self.nx, self.ny, self.nz, idum, idum, rdum, rdum,
            rdum
        ], [ione, ione, self.nx, self.ny], self.spcnames,
                [ione, self.nstk], self.stk_props, self.stk_time_props]

    def __readheader(self):
        """
        Read the header section and initialize each header field
        (see CAMx Users Manual for a list) as a property of the class.
        """
        vals = self.rffile.read(self.emiss_hdr_fmt)
        (self.name, self.note, ione, self.nspec, self.start_date,
         self.start_time, self.end_date, self.end_time) = (
            vals[0:10], vals[10:70], vals[70], vals[71], vals[72],
            vals[73], vals[74], vals[75])
        (rdum, rdum, self.iutm, self.xorg, self.yorg, self.delx,
         self.dely, self.nx, self.ny, self.nz, idum, idum, rdum, rdum,
         rdum) = self.rffile.read(self.grid_hdr_fmt)
        if self.nz == 0:
            # Special case of gridded emissions; seems to be same as avrg
            self.nlayers = 1
        else:
            self.nlayers = self.nz
        ione, ione, nx, ny = self.rffile.read(self.cell_hdr_fmt)
        if not (self.nx, self.ny) == (nx, ny):
            raise ValueError(
                "nx, ny defined first as %i, %i and then as %i, %i" %
                (self.nx, self.ny, nx, ny))
        species_temp = self.rffile.read(self.nspec * self.spc_fmt)
        self.spcnames = []
        for i in range(0, self.nspec * 10, 10):
            self.spcnames.append(Int2Asc(species_temp[i:i + 10]))
        ione, self.nstk = self.rffile.read(self.nstk_hdr_fmt)
        stkprms = zeros((self.nstk * len(self.stk_hdr_fmt), ), 'f')
        read_into(self.rffile, stkprms, '')
        self.rffile.next()
        stkprms = stkprms.reshape((self.nstk, len(self.stk_hdr_fmt)))
        # Replace the sentinel missing value in the last column with NaN
        for i in range(stkprms.shape[0]):
            if stkprms[i, -1] == array_nan:
                stkprms[i, -1] = float('-nan')
        self.stk_props = stkprms.tolist()
        self.data_start_byte = self.rffile.record_start
        (self.start_date, self.start_time, end_date,
         end_time) = self.rffile.read(self.time_hdr_fmt)
        self.time_step = timediff((self.start_date, self.start_time),
                                  (end_date, end_time))
        # header end hour marks the start of the last step; extend it
        self.end_time += self.time_step
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time),
                     (2400, 24)[int(self.time_step % 2)]) / self.time_step)
        self.stk_time_prop_fmt = "" + ("iiiff" * self.nstk)
        self.padded_stk_time_prop_size = struct.calcsize(
            "ii" + self.stk_time_prop_fmt)
        self.record_fmt = ("i10i") + self.data_fmt * (self.nstk)
        self.record_size = struct.calcsize(self.record_fmt)
        self.padded_size = self.record_size + 8

    def __gettimestep(self):
        """
        Taken care of in __readheader: each hourly record provides its
        own start and end (t1 and t2), so nothing more to do here.
        """
        pass

    def __gettimeprops(self):
        """Read the per-hour stack property block for every time step."""
        self.stk_time_props = []
        eod = (2400, 24)[int(self.time_step % 2)]
        for ti, (d, t) in enumerate(
                timerange((self.start_date, self.start_time),
                          (self.end_date, self.end_time),
                          self.time_step, eod)):
            tmpprop = zeros((len(self.stk_time_prop_fmt)), 'f')
            tmpprop[...] = self.seekandread(d, t, 1, True,
                                            self.stk_time_prop_fmt)
            tmpprop = tmpprop.reshape(self.nstk, 5)
            for i in range(tmpprop.shape[0]):
                if tmpprop[i, -2] == array_nan:
                    tmpprop[i, -2] = float('-nan')
            self.stk_time_props.append(tmpprop.tolist())

    def __timerecords(self, dt):
        """
        Calculate the number of records to increment to reach
        time (d, t).
        """
        d, t = dt
        nsteps = timediff((self.start_date, self.start_time), (d, t),
                          (2400, 24)[int(self.time_step % 2)])
        nspec = self.__spcrecords(self.nspec + 1)
        return nsteps * (nspec)

    def __spcrecords(self, spc):
        """Calculated number of records before spc (1-based)."""
        return spc - 1

    def __recordposition(self, date, time, spc, offset=False):
        """
        Byte position of the record for (date, time, spc).

        date   - integer julian date
        time   - float hour
        spc    - integer species index (1-based)
        offset - when true, back up one stack-time-property block
        """
        ntime = self.__timerecords((date, time))
        # One header group (time + nstk + stack time properties)
        # precedes each hour of data.  Bug fix: wrap in int() -- true
        # division yielded a float byte offset under Python 3.
        nhdr = int(ntime / self.__spcrecords(self.nspec + 1)) + 1
        nspc = self.__spcrecords(spc)
        noffset = -abs(int(offset))
        byte = self.data_start_byte
        byte += nhdr * (self.padded_time_hdr_size +
                        self.padded_nstk_hdr_size +
                        self.padded_stk_time_prop_size)
        byte += (ntime + nspc) * self.padded_size
        byte += noffset * self.padded_stk_time_prop_size
        return byte

    def seek(self, date=None, time=None, spc=-1, offset=False):
        """
        Move the file cursor to the beginning of the specified record;
        see __recordposition for parameter definitions.  Range checking
        of date/time/spc is intentionally not performed here.
        """
        self.rffile._newrecord(
            self.__recordposition(date, time, spc, offset))

    def read(self, fmt=None):
        """Direct record file read; fmt defaults to a full data record."""
        if fmt is None:
            fmt = self.record_fmt
        return self.rffile.read(fmt)

    def read_into(self, dest):
        """
        Transfer values from the current record to dest.

        dest - numeric or numpy array
        """
        return read_into(self.rffile, dest, self.id_fmt, self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, spc=1):
        """See seek and read_into."""
        self.seek(date, time, spc)
        self.read_into(dest)

    def seekandread(self, date=None, time=None, spc=1, offset=False,
                    fmt=None):
        """See seek and read."""
        self.seek(date, time, spc, offset)
        return self.read(fmt)

    def values(self):
        for d, t, spc in self.__iter__():
            yield self.seekandread(d, t, spc)

    def items(self):
        for d, t, spc in self.__iter__():
            yield d, t, spc, self.seekandread(d, t, spc)

    def keys(self):
        for ti, (d, t) in enumerate(self.timerange()):
            for spc in range(1, len(self.spcnames) + 1):
                yield d, t, spc

    __iter__ = keys

    def getArray(self):
        """Return emissions as a (TSTEP, species, stack) float array."""
        a = zeros((self.time_step_count, self.nspec, self.nstk), 'f')
        for ti, (d, t) in enumerate(self.timerange()):
            for spc in range(1, len(self.spcnames) + 1):
                self.seekandreadinto(a[ti, spc - 1, ...], d, t, spc)
        return a.copy()

    def timerange(self):
        return timerange((self.start_date, self.start_time),
                         (self.end_date, self.end_time), self.time_step,
                         eod=24)
class wind(PseudoNetCDFFile):
    """
    wind provides a PseudoNetCDF interface for CAMx wind files.
    Where possible, the interface follows IOAPI conventions
    (see www.baronams.com).

    ex:
        >>> wind_path = 'camx_wind.bin'
        >>> rows, cols = 65, 83
        >>> windfile = wind(wind_path, rows, cols)
        >>> v = windfile.variables['V']
        >>> v.dimensions
        ('TSTEP', 'LAY', 'ROW', 'COL')
        >>> v.shape
        (25, 28, 65, 83)
    """
    # 12-byte header: time, date, lstagger; 8-byte header: time, date
    time_hdr_fmts = {12: "fii", 8: "fi"}
    data_fmt = "f"

    def __init__(self, rf, rows=None, cols=None):
        """
        rf   - path to a CAMx wind file
        rows - number of grid rows (None to infer)
        cols - number of grid columns (None to infer)
        """
        self.rffile = OpenRecordFile(rf)
        self.time_hdr_fmt = self.time_hdr_fmts[self.rffile.record_size]
        self.time_hdr_size = struct.calcsize(self.time_hdr_fmt)
        self.padded_time_hdr_size = struct.calcsize(
            "ii" + self.time_hdr_fmt)
        self.__readheader()
        self.__gettimestep()
        if rows is None and cols is None:
            rows = self.cell_count
            cols = 1
        elif rows is None:
            # Bug fix: integer division -- cell counts are whole
            # numbers; true division produced float dimensions.
            rows = self.cell_count // cols
        elif cols is None:
            cols = self.cell_count // rows
        else:
            if cols * rows != self.cell_count:
                raise ValueError(
                    ("The product of cols (%d) and rows (%d) " +
                     "must equal cells (%d)") %
                    (cols, rows, self.cell_count))
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('COL', cols)
        self.createDimension('ROW', rows)
        self.createDimension('LAY', self.nlayers)
        self.variables = PseudoNetCDFVariables(self.__var_get,
                                               ['U', 'V'])

    def __var_get(self, key):
        """Construct, cache, and decorate the U or V variable."""
        def constr(uv):
            return self.getArray()[:, {'U': 0, 'V': 1}[uv],
                                   :, :, :].copy()

        def decor(uv):
            return dict(units='m/s', var_desc=uv.ljust(16),
                        long_name=uv.ljust(16))

        values = constr(key)
        var = self.createVariable(key, 'f',
                                  ('TSTEP', 'LAY', 'ROW', 'COL'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __readheader(self):
        """Read the time header and derive record geometry."""
        self.data_start_byte = 0
        if self.time_hdr_fmt == 'fii':
            (self.start_time, self.start_date,
             self.lstagger) = self.rffile.read(self.time_hdr_fmt)
        elif self.time_hdr_fmt == 'fi':
            self.start_time, self.start_date = self.rffile.read(
                self.time_hdr_fmt)
            self.lstagger = None
        else:
            raise NotImplementedError("Header format is unknown")
        self.record_size = self.rffile.record_size
        self.padded_size = self.record_size + 8
        self.cell_count = self.record_size // struct.calcsize(
            self.data_fmt)
        self.record_fmt = self.data_fmt * self.cell_count

    def __gettimestep(self):
        """
        Header information provides start and end date, but not the
        increment between.  Search for the next record with the same
        length as the time header: (1) its date minus the start date is
        the time step, and (2) the records between it and the first
        header are the per-layer data records.
        """
        self.rffile._newrecord(0)
        self.rffile.next()
        nlayers = 0
        while not self.rffile.record_size == self.time_hdr_size:
            self.rffile.next()
            nlayers += 1
        # each layer has a U and a V record
        self.nlayers = (nlayers - 1) // 2
        if self.time_hdr_fmt == "fi":
            time, date = self.rffile.read(self.time_hdr_fmt)
        elif self.time_hdr_fmt == "fii":
            time, date, lstagger = self.rffile.read(self.time_hdr_fmt)
        self.end_time, self.end_date = time, date
        self.time_step = timediff((self.start_date, self.start_time),
                                  (date, time))
        # walk forward one step at a time to find the true end date/time
        while True:
            try:
                for i in range(self.nlayers * 2 + 1):
                    self.rffile.next()
                if self.rffile.record_size == 8:
                    self.end_time, self.end_date = self.rffile.read("fi")
                elif self.rffile.record_size == 12:
                    (self.end_time, self.end_date,
                     lstagger) = self.rffile.read("fii")
                else:
                    raise KeyError()
            except Exception:
                break
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time)) /
            self.time_step) + 1

    def __layerrecords(self, k):
        """Records to skip, per component, to reach layer k."""
        return k - 1

    def __timerecords(self, dt):
        """
        Number of layer-record pairs from the data start byte to the
        first record of time (d, t).
        """
        d, t = dt
        nsteps = int(
            timediff((self.start_date, self.start_time),
                     (d, t)) / self.time_step)
        nlays = self.__layerrecords(self.nlayers + 1)
        return nsteps * nlays

    def __recordposition(self, date, time, k, duv):
        """
        Byte position for (date, time, layer k, duv), where duv selects
        the time header (0), u wind (1), or v wind (2).
        """
        # renamed from "bytes" to avoid shadowing the builtin
        pos = self.data_start_byte
        nsteps = self.__timerecords((date, time))
        pos += int(nsteps / self.nlayers) * self.padded_time_hdr_size
        # NOTE(review): 12 extra bytes per completed time step --
        # purpose not evident from this file; preserved as-is.
        pos += int(nsteps / self.nlayers) * 12
        pos += nsteps * self.padded_size * 2
        if not duv == 0:
            pos += self.padded_time_hdr_size
            pos += self.__layerrecords(k) * 2 * self.padded_size
            if duv == 2:
                pos += self.padded_size
        return pos

    def seek(self, date=None, time=None, k=1, uv=1):
        """
        Move the file cursor to the beginning of the specified record;
        see __recordposition for a definition of variables.
        """
        if date is None:
            date = self.start_date
        if time is None:
            time = self.start_time
        chkvar = True
        if (chkvar and timediff(
                (self.end_date, self.end_time), (date, time)) > 0
                or timediff((self.start_date, self.start_time),
                            (date, time)) < 0):
            raise KeyError(
                ("Wind file includes (%i,%6.1f) thru (%i,%6.1f); " +
                 "you requested (%i,%6.1f)") %
                (self.start_date, self.start_time, self.end_date,
                 self.end_time, date, time))
        if chkvar and (uv < 0 or uv > 2):
            raise KeyError(
                "Wind file includes Date (uv: 0), u velocity " +
                "(uv: 1) and v velocity (uv: 2); you requested %i" %
                (uv))
        self.rffile._newrecord(self.__recordposition(date, time, k, uv))

    def read(self):
        """Provide direct access to the underlying RecordFile read."""
        return self.rffile.read(self.record_fmt)

    def read_into(self, dest):
        """
        Put values from the current record into dest.

        dest - numpy or numeric array
        """
        return read_into(self.rffile, dest, "", self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, k=1, duv=1):
        """See seek and read_into."""
        self.seek(date, time, k, duv)
        self.read_into(dest)

    def seekandread(self, date=None, time=None, k=1, duv=1):
        """See seek and read."""
        self.seek(date, time, k, duv)
        return self.read()

    def keys(self):
        for d, t in timerange((self.start_date, self.start_time),
                              timeadd((self.end_date, self.end_time),
                                      (0, self.time_step)),
                              self.time_step):
            for k in range(1, self.nlayers + 1):
                yield d, t, k

    def values(self):
        for d, t, k in self.keys():
            yield (self.seekandread(d, t, k, 1),
                   self.seekandread(d, t, k, 2))

    def items(self):
        for d, t, k in self.keys():
            y1, y2 = (self.seekandread(d, t, k, 1),
                      self.seekandread(d, t, k, 2))
            yield d, t, k, y1, y2

    __iter__ = keys

    def getArray(self, krange=slice(1, None)):
        """
        Return winds as a (TSTEP, 2, layer-subset, ROW, COL) array;
        index 0/1 on the second axis are U/V.

        krange - slice (or tuple/int coerced to one) of 1-based layers
        """
        if not isinstance(krange, slice):
            if isinstance(krange, tuple):
                krange = slice(*krange)
            if isinstance(krange, int):
                krange = slice(krange, krange + 1)
        a = zeros((
            self.time_step_count,
            2,
            len(range(*krange.indices(self.nlayers + 1))),
            len(self.dimensions['ROW']),
            len(self.dimensions['COL']),
        ), 'f')
        nlay = self.nlayers
        for i, (d, t) in enumerate(self.timerange()):
            for uv in range(1, 3):
                for ki, k in enumerate(range(*krange.indices(nlay + 1))):
                    # Bug fix: the original rebound ki to k - 1, which
                    # indexes out of range whenever krange does not
                    # start at layer 1; use the enumerate index.
                    self.seekandreadinto(a[i, uv - 1, ki, :, :],
                                         d, t, k, uv)
        return a

    def timerange(self):
        return timerange((self.start_date, self.start_time),
                         timeadd((self.end_date, self.end_time),
                                 (0, self.time_step)), self.time_step)
class height_pressure(PseudoNetCDFFile):
    """
    height_pressure provides a PseudoNetCDF interface for CAMx
    height_pressure files.  Where possible, the interface follows
    IOAPI conventions (see www.baronams.com).

    ex:
        >>> height_pressure_path = 'camx_height_pressure.bin'
        >>> rows, cols = 65, 83
        >>> hpf = height_pressure(height_pressure_path, rows, cols)
        >>> hpf.variables.keys()
        ['TFLAG', 'HGHT', 'PRES']
        >>> v = hpf.variables['HGHT']
        >>> tflag = hpf.variables['TFLAG']
        >>> tflag.dimensions
        ('TSTEP', 'VAR', 'DATE-TIME')
        >>> tflag[0,0,:]
        array([2005185,       0])
        >>> tflag[-1,0,:]
        array([2005185,  240000])
        >>> v.dimensions
        ('TSTEP', 'LAY', 'ROW', 'COL')
        >>> v.shape
        (25, 28, 65, 83)
        >>> hpf.dimensions
        {'TSTEP': 25, 'LAY': 28, 'ROW': 65, 'COL': 83}
    """

    id_fmt = "fi"
    data_fmt = "f"

    def __init__(self, rf, rows, cols):
        """
        Initialization included reading the header and learning
        about the format.

        see __readheader and __gettimestep() for more info

        rf   - path to a CAMx height/pressure file
        rows - row count (None to derive from cols or cell count)
        cols - column count (None to derive from rows or cell count)
        """
        self.rffile = OpenRecordFile(rf)
        self.id_size = struct.calcsize(self.id_fmt)
        self.__readheader()
        self.__gettimestep()
        # Derive any missing grid dimension from the per-record cell count.
        # Integer (floor) division: counts must stay ints on Python 3.
        if rows is None and cols is None:
            rows = self.cell_count
            cols = 1
        elif rows is None:
            rows = self.cell_count // cols
        elif cols is None:
            cols = self.cell_count // rows
        else:
            if cols * rows != self.cell_count:
                raise ValueError(
                    "The product of cols (%d) and rows (%d) must equal "
                    "cells (%d)" % (cols, rows, self.cell_count))
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('ROW', rows)
        self.createDimension('COL', cols)
        self.createDimension('LAY', self.nlayers)
        self.variables = PseudoNetCDFVariables(self.__var_get,
                                               ['HGHT', 'PRES'])

    def __var_get(self, key):
        # Lazily read one variable (HGHT or PRES) and attach
        # IOAPI-style metadata.
        def constr(hp):
            return self.getArray({'HGHT': 0, 'PRES': 1}[hp])

        def decor(hp):
            return {'HGHT': dict(units='m', var_desc='HGHT'.ljust(16),
                                 long_name='HGHT'.ljust(16)),
                    'PRES': dict(units='hPa', var_desc='PRES'.ljust(16),
                                 long_name='PRES'.ljust(16))}[hp]

        values = constr(key)
        var = self.createVariable(key, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __readheader(self):
        """
        __readheader reads the header section of the ipr file
        it initializes each header field (see CAMx Users Manual for a list)
        as properties of the ipr class
        """
        self.data_start_byte = 0
        self.start_time, self.start_date = self.rffile.read(self.id_fmt)
        self.record_size = self.rffile.record_size
        # Fortran records carry a 4-byte length marker on each end.
        self.padded_size = self.record_size + 8
        self.cell_count = ((self.record_size - self.id_size) //
                           struct.calcsize(self.data_fmt))
        self.record_fmt = self.id_fmt + self.data_fmt * self.cell_count

    def __gettimestep(self):
        """
        Header information provides start and end date, but does not
        indicate the increment between.  This routine reads the first and
        second date/time and initializes variables indicating the timestep
        length and the anticipated number.
        """
        self.rffile._newrecord(self.padded_size * 2)
        d, t = self.start_date, self.start_time
        self.nlayers = 0
        # Count layer records until the date/time advances.
        while timediff((self.start_date, self.start_time), (d, t)) == 0:
            t, d = self.rffile.read(self.id_fmt)
            self.rffile.next()
            self.nlayers += 1
        self.time_step = timediff((self.start_date, self.start_time), (d, t))
        # Step forward until a seek fails (end of file) to find the last
        # valid time; best-effort probing, so a broad catch is deliberate.
        while True:
            try:
                self.seek(d, t, self.nlayers, 1, False)
                d, t = timeadd((d, t), (0, self.time_step))
            except Exception:
                break
        self.end_date, self.end_time = timeadd((d, t), (0, -self.time_step))
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time)) / self.time_step) + 1

    def __timerecords(self, dt):
        """
        routine returns the number of records to increment from the
        data start byte to find the first time
        """
        d, t = dt
        nsteps = int(timediff((self.start_date, self.start_time),
                              (d, t)) / self.time_step)
        nk = self.__layerrecords(self.nlayers + 1)
        return nsteps * nk

    def __layerrecords(self, k):
        """
        routine returns the number of records to increment from the
        data start byte to find the first klayer
        """
        # Two records (height and pressure) per layer.
        return (k - 1) * 2

    def __recordposition(self, date, time, k, hp):
        """
        routine uses timerecords and layerrecords multiplied plus hp
        by the fortran padded size to return the byte position of the
        specified record

        date - integer
        time - float
        k - integer
        hp - integer (0=h,1=p)
        """
        ntime = self.__timerecords((date, time))
        nk = self.__layerrecords(k)
        return (nk + ntime + hp) * self.padded_size + self.data_start_byte

    def seek(self, date=None, time=None, k=1, hp=0, chkvar=True):
        """
        Move file cursor to specified record
        """
        if date is None:
            date = self.start_date
        if time is None:
            time = self.start_time
        # Precedence preserved from original: (chkvar and A) or B.
        if (chkvar and
                timediff((self.end_date, self.end_time),
                         (date, time)) > 0) or \
                timediff((self.start_date, self.start_time),
                         (date, time)) < 0:
            raise KeyError(
                "Height Pressure file includes (%i,%6.1f) thru (%i,%6.1f); "
                "you requested (%i,%6.1f)" %
                (self.start_date, self.start_time,
                 self.end_date, self.end_time, date, time))
        if (chkvar and k < 1) or k > self.nlayers:
            raise KeyError(
                "Height Pressure file includes layers 1 thru %i; "
                "you requested %i" % (self.nlayers, k))
        if (chkvar and hp < 0) or hp > 1:
            raise KeyError(
                "Height and pressure are indexed 0 and 1; "
                "you requested %i" % (hp))
        self.rffile._newrecord(self.__recordposition(date, time, k, hp))

    def read(self):
        """
        Call recordfile read method directly
        """
        return self.rffile.read(self.record_fmt)

    def read_into(self, dest):
        """
        put values from rffile read into dest
        dest - numpy or numeric array
        """
        return read_into(self.rffile, dest, self.id_fmt, self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, k=1, hp=0):
        """
        see seek and read
        """
        self.seek(date, time, k, hp)
        return self.read_into(dest)

    def seekandread(self, date=None, time=None, k=1, hp=0):
        """
        see seek and read
        """
        self.seek(date, time, k, hp)
        return self.read()

    def values(self):
        for d, t, k in self.__iter__():
            yield self.seekandread(d, t, k, 0), self.seekandread(d, t, k, 1)

    def items(self):
        for d, t, k in self.__iter__():
            yield (d, t, k, self.seekandread(d, t, k, 0),
                   self.seekandread(d, t, k, 1))

    def keys(self):
        for d, t in self.timerange():
            for k in range(1, self.nlayers + 1):
                yield d, t, k

    __iter__ = keys

    def getArray(self, hp):
        """Read every time/layer of height (hp=0) or pressure (hp=1)."""
        a = zeros((self.time_step_count,
                   len(self.dimensions['LAY']),
                   len(self.dimensions['ROW']),
                   len(self.dimensions['COL'])), 'f')
        for ti, (d, t) in enumerate(self.timerange()):
            for ki, k in enumerate(range(1, self.nlayers + 1)):
                # original indexed a[ti, ki, ..., ...]; numpy allows only
                # one Ellipsis per index, so use explicit slices
                self.seekandreadinto(a[ti, ki, :, :], d, t, k, hp)
        return a

    def timerange(self):
        # Hour/HHMM granularity flag computed once for both arguments.
        tincr = (2400, 24)[int(self.time_step % 2)]
        return timerange((self.start_date, self.start_time),
                         timeadd((self.end_date, self.end_time),
                                 (0, self.time_step), tincr),
                         self.time_step, tincr)
class wind(PseudoNetCDFFile):
    """
    wind provides a PseudoNetCDF interface for CAMx
    wind files.  Where possible, the interface follows
    IOAPI conventions (see www.baronams.com).

    ex:
        >>> wind_path = 'camx_wind.bin'
        >>> rows, cols = 65, 83
        >>> windfile = wind(wind_path, rows, cols)
        >>> windfile.variables.keys()
        ['TFLAG', 'U', 'V']
        >>> v = windfile.variables['V']
        >>> tflag = windfile.variables['TFLAG']
        >>> tflag.dimensions
        ('TSTEP', 'VAR', 'DATE-TIME')
        >>> tflag[0,0,:]
        array([2005185,       0])
        >>> tflag[-1,0,:]
        array([2005185,  240000])
        >>> v.dimensions
        ('TSTEP', 'LAY', 'ROW', 'COL')
        >>> v.shape
        (25, 28, 65, 83)
        >>> windfile.dimensions
        {'TSTEP': 25, 'LAY': 28, 'ROW': 65, 'COL': 83}
    """

    # record_size -> header struct: 12 bytes carries an lstagger flag
    time_hdr_fmts = {12: "fii", 8: "fi"}
    data_fmt = "f"

    def __init__(self, rf, rows=None, cols=None):
        """
        Initialization included reading the header and learning
        about the format.

        see __readheader and __gettimestep() for more info

        rf   - path to a CAMx wind file
        rows - row count (None to derive from cols or cell count)
        cols - column count (None to derive from rows or cell count)
        """
        self.rffile = OpenRecordFile(rf)
        self.time_hdr_fmt = self.time_hdr_fmts[self.rffile.record_size]
        self.time_hdr_size = struct.calcsize(self.time_hdr_fmt)
        self.padded_time_hdr_size = struct.calcsize("ii" + self.time_hdr_fmt)
        self.__readheader()
        self.__gettimestep()
        # Derive any missing grid dimension from the per-record cell count.
        # Integer (floor) division: counts must stay ints on Python 3.
        if rows is None and cols is None:
            rows = self.cell_count
            cols = 1
        elif rows is None:
            rows = self.cell_count // cols
        elif cols is None:
            cols = self.cell_count // rows
        else:
            if cols * rows != self.cell_count:
                raise ValueError(
                    "The product of cols (%d) and rows (%d) must equal "
                    "cells (%d)" % (cols, rows, self.cell_count))
        self.createDimension('TSTEP', self.time_step_count)
        self.createDimension('COL', cols)
        self.createDimension('ROW', rows)
        self.createDimension('LAY', self.nlayers)
        self.variables = PseudoNetCDFVariables(self.__var_get, ['U', 'V'])

    def __var_get(self, key):
        # Lazily read one wind component (U or V) and attach metadata.
        def constr(uv):
            return self.getArray()[:, {'U': 0, 'V': 1}[uv], :, :, :].copy()

        def decor(uv):
            return dict(units='m/s', var_desc=uv.ljust(16),
                        long_name=uv.ljust(16))

        values = constr(key)
        var = self.createVariable(key, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'))
        var[:] = values
        for k, v in decor(key).items():
            setattr(var, k, v)
        return var

    def __readheader(self):
        """
        __readheader reads the header section of the ipr file
        it initializes each header field (see CAMx Users Manual for a list)
        as properties of the ipr class
        """
        self.data_start_byte = 0
        if self.time_hdr_fmt == 'fii':
            self.start_time, self.start_date, self.lstagger = \
                self.rffile.read(self.time_hdr_fmt)
        elif self.time_hdr_fmt == 'fi':
            self.start_time, self.start_date = \
                self.rffile.read(self.time_hdr_fmt)
            self.lstagger = None
        else:
            raise NotImplementedError("Header format is unknown")
        self.record_size = self.rffile.record_size
        # Fortran records carry a 4-byte length marker on each end.
        self.padded_size = self.record_size + 8
        self.cell_count = self.record_size // struct.calcsize(self.data_fmt)
        self.record_fmt = self.data_fmt * self.cell_count

    def __gettimestep(self):
        """
        Header information provides start and end date, but does not
        indicate the increment between.  This routine reads the first and
        second date/time and initializes variables indicating the timestep
        length and the anticipated number.
        """
        # This is a bit of a hack, but should work:
        # Search for the next record that is the same
        # length as self.padded_time_hdr_size
        #
        # This should be the next date record
        # 1) date - startdate = timestep
        # 2) (record_start - self.padded_time_hdr_size)/self.padded_size
        #    = klayers
        self.rffile.next()
        while not self.rffile.record_size == self.time_hdr_size:
            self.rffile.next()
        dist_btwn_dates = (self.rffile.record_start -
                           self.padded_time_hdr_size)
        # Two records (u and v) per layer; floor division keeps this an int.
        self.nlayers = dist_btwn_dates // self.padded_size // 2
        if self.time_hdr_fmt == "fi":
            time, date = self.rffile.read(self.time_hdr_fmt)
        elif self.time_hdr_fmt == "fii":
            time, date, lstagger = self.rffile.read(self.time_hdr_fmt)
        self.time_step = timediff((self.start_date, self.start_time),
                                  (date, time))
        # Step forward one time block at a time until reads fail (EOF);
        # best-effort probing, so a broad catch is deliberate.
        while True:
            try:
                self.rffile._newrecord(self.rffile.record_start +
                                       dist_btwn_dates)
                self.rffile.tell()
                if self.time_hdr_fmt == "fi":
                    self.end_time, self.end_date = \
                        self.rffile.read(self.time_hdr_fmt)
                elif self.time_hdr_fmt == "fii":
                    self.end_time, self.end_date, lstagger = \
                        self.rffile.read(self.time_hdr_fmt)
            except Exception:
                break
        self.time_step_count = int(
            timediff((self.start_date, self.start_time),
                     (self.end_date, self.end_time)) / self.time_step) + 1

    def __layerrecords(self, k):
        # Records to skip to reach 1-based layer k.
        return k - 1

    def __timerecords(self, dt):
        """
        routine returns the number of records to increment from the
        data start byte to find the first time
        """
        d, t = dt
        nsteps = int(timediff((self.start_date, self.start_time),
                              (d, t)) / self.time_step)
        nlays = self.__layerrecords(self.nlayers + 1)
        return nsteps * nlays

    def __recordposition(self, date, time, k, duv):
        """
        routine uses pagridrecords, timerecords,irecords,
        jrecords, and krecords multiplied by the fortran padded size
        to return the byte position of the specified record

        date - integer
        time - float
        duv - integer (0=date,1=uwind,2=vwind)
        """
        nbytes = self.data_start_byte
        nsteps = self.__timerecords((date, time))
        # One time header plus 12 extra bytes per completed time block.
        nbytes += (nsteps // self.nlayers) * self.padded_time_hdr_size
        nbytes += (nsteps // self.nlayers) * 12
        nbytes += nsteps * self.padded_size * 2
        if duv != 0:
            nbytes += self.padded_time_hdr_size
        nbytes += self.__layerrecords(k) * 2 * self.padded_size
        if duv == 2:
            nbytes += self.padded_size
        return nbytes

    def seek(self, date=None, time=None, k=1, uv=1):
        """
        Move file cursor to beginning of specified record
        see __recordposition for a definition of variables
        """
        if date is None:
            date = self.start_date
        if time is None:
            time = self.start_time
        chkvar = True
        # Precedence preserved from original: (chkvar and A) or B.
        if (chkvar and
                timediff((self.end_date, self.end_time),
                         (date, time)) > 0) or \
                timediff((self.start_date, self.start_time),
                         (date, time)) < 0:
            raise KeyError(
                "Wind file includes (%i,%6.1f) thru (%i,%6.1f); "
                "you requested (%i,%6.1f)" %
                (self.start_date, self.start_time,
                 self.end_date, self.end_time, date, time))
        if (chkvar and uv < 0) or uv > 2:
            raise KeyError(
                "Wind file includes Date (uv: 0), u velocity (uv: 1) "
                "and v velocity (uv: 2); you requested %i" % (uv))
        self.rffile._newrecord(self.__recordposition(date, time, k, uv))

    def read(self):
        """
        provide direct access to the underlying RecordFile read
        method
        """
        return self.rffile.read(self.record_fmt)

    def read_into(self, dest):
        """
        put values from rffile read into dest
        dest - numpy or numeric array
        """
        return read_into(self.rffile, dest, "", self.data_fmt)

    def seekandreadinto(self, dest, date=None, time=None, k=1, duv=1):
        """
        see seek and read_into
        """
        self.seek(date, time, k, duv)
        self.read_into(dest)

    def seekandread(self, date=None, time=None, k=1, duv=1):
        """
        see seek and read
        """
        self.seek(date, time, k, duv)
        return self.read()

    def keys(self):
        for d, t in timerange((self.start_date, self.start_time),
                              timeadd((self.end_date, self.end_time),
                                      (0, self.time_step)),
                              self.time_step):
            for k in range(1, self.nlayers + 1):
                yield d, t, k

    def values(self):
        for d, t, k in self.keys():
            yield self.seekandread(d, t, k, 1), self.seekandread(d, t, k, 2)

    def items(self):
        for d, t, k in self.keys():
            yield (d, t, k, self.seekandread(d, t, k, 1),
                   self.seekandread(d, t, k, 2))

    __iter__ = keys

    def getArray(self, krange=slice(1, None)):
        """
        Read u and v for the requested layer range.

        krange - slice (or int / tuple, converted) of 1-based layers
        returns array of shape (TSTEP, 2, LAY-subset, ROW, COL)
        """
        if not isinstance(krange, slice):
            if isinstance(krange, tuple):
                krange = slice(*krange)
            if isinstance(krange, int):
                krange = slice(krange, krange + 1)
        ks = range(*krange.indices(self.nlayers + 1))
        a = zeros((self.time_step_count,
                   2,
                   len(ks),
                   len(self.dimensions['ROW']),
                   len(self.dimensions['COL'])), 'f')
        for i, (d, t) in enumerate(self.timerange()):
            for uv in range(1, 3):
                for ki, k in enumerate(ks):
                    # index the output with the enumeration index ki (the
                    # original used k - 1, which overruns the array for a
                    # krange that does not start at layer 1)
                    self.seekandreadinto(a[i, uv - 1, ki, :, :], d, t, k, uv)
        return a

    def timerange(self):
        return timerange((self.start_date, self.start_time),
                         timeadd((self.end_date, self.end_time),
                                 (0, self.time_step)),
                         self.time_step)