def __FCLOUD__(self):
    """Derive FCLOUD: an integer flag marking cells where CLOUD >= 5."""
    flagged = self.variables["CLOUD"] >= 5
    out = PseudoNetCDFVariable(
        self, "FCLOUD", "i", ("TSTEP", "LAY", "ROW", "COL"),
        values=array(flagged, dtype="i"))
    out.units = "None"
    out.long_name = "FCLOUD".ljust(16)
    out.var_desc = "FCLOUD".ljust(16)
    return out
def ConvertCAMxTime(date, time, nvars):
    """Build an IOAPI-style TFLAG variable from CAMx date/time arrays.

    date - 1-D or 2-D array of Julian dates (YYDDD or YYYYDDD)
    time - matching array of times
    nvars - number of variables TFLAG is repeated for
    """
    class temp:
        pass
    parent = temp()
    parent.dimensions = {'TSTEP': date.shape[0], 'VAR': nvars,
                         'DATE-TIME': 2}
    stacked = array([date, time], dtype='i').swapaxes(0, 1)
    if len(stacked.shape) == 2:
        stacked = stacked[:, newaxis, :]
    date = stacked[:, :, 0]
    # Promote 2-digit years to 7-digit YYYYDDD (70000 splits the century).
    if (date < 70000).any():
        date += 2000000
    else:
        date += 1900000
    time = stacked[:, :, 1]
    # Scale nonzero times up until they reach HHMMSS form.
    while not (time == 0).all() and time.max() < 10000:
        time *= 100
    tflag = PseudoNetCDFVariable(
        parent, 'TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'),
        values=stacked[:, [0], :].repeat(nvars, 1))
    tflag.units = 'DATE-TIME'.ljust(16)
    tflag.long_name = 'TFLAG'.ljust(16)
    tflag.var_desc = tflag.long_name
    return tflag
def __add_variables(self):
    """Copy TFLAG and add destaggered, unit-converted U/V wind variables."""
    tflag = self.createVariable(
        'TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'), keep=True)
    tflag[:] = self.__windfile.variables['TFLAG'][self.__timeslice]
    tflag.long_name = 'Time flag'
    tflag.units = 'DATE-TIME'
    if self.__force_stagger and self.__windfile.LSTAGGER == 0:
        warn('Cell centered values are being averaged as though ' +
             'staggered. Could just be pre v4.3 file that was actually ' +
             'staggered')
    for key in ['U', 'V']:
        # Choose the centering routine per component and stagger state.
        if self.__force_stagger or self.__windfile.LSTAGGER != 0:
            preproc = CenterCAMxU if key == 'U' else CenterCAMxV
        else:
            preproc = CenterTime
        src = self.__windfile.variables[key]
        centered = PseudoNetCDFVariable(
            self, key, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            values=preproc(src))
        centered.units = src.units
        centered.long_name = key.ljust(16)
        centered.var_desc = (key + ' at center').ljust(16)
        self.variables[key] = PseudoNetCDFVariableConvertUnit(
            centered, self.__outunit)
def __variables(self, pk, proc_spc):
    """Return the IPR variable *proc_spc* for process-group *pk*.

    *proc_spc* may be a bare process name (served with the first
    species), 'TFLAG', or a compound 'PROC_SPC' name.
    """
    if proc_spc in self.__ipr_record_type.names:
        # Bare process name: present it under PROC_<first species>.
        proc = proc_spc
        proc_spc = proc_spc + '_' + self.spcnames[0]
        return PseudoNetCDFVariable(
            self, proc_spc, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            # swapaxes pairs reorder the memmap axes into
            # (TSTEP, LAY, ROW, COL) -- presumed layout; confirm.
            values=self.__memmaps[pk][:, 0, :, :, :][proc].swapaxes(
                1, 3).swapaxes(2, 3))
    if proc_spc == 'TFLAG':
        # Dates/times are constant over space; take the [0, 0, 0] cell.
        thisdate = self.__memmaps[pk][:, 0, :, :, :]['DATE'].swapaxes(
            1, 3).swapaxes(2, 3)[..., 0, 0, 0]
        thistime = self.__memmaps[pk][:, 0, :, :, :]['TIME'].swapaxes(
            1, 3).swapaxes(2, 3)[..., 0, 0, 0]
        return ConvertCAMxTime(thisdate, thistime,
                               len(self.groups[pk].dimensions['VAR']))
    for k in self.__ipr_record_type.names:
        # Try to split 'PROC_SPC' on each known process-name prefix.
        proc = proc_spc[:len(k)]
        spc = proc_spc[len(k) + 1:]
        if proc == k and spc in self.spcnames:
            spc = self.spcnames.index(spc)
            dvals = self.__memmaps[pk][:, spc][proc].swapaxes(1, 3).swapaxes(
                2, 3)
            return self.__decorator(
                proc_spc,
                PseudoNetCDFVariable(self, proc_spc, 'f',
                                     ('TSTEP', 'LAY', 'ROW', 'COL'),
                                     values=dvals))
    raise KeyError("Bad!")
def setUp(self):
    """Build an in-memory mock of a merged process-analysis file plus
    Species/Process tables, then exercise testProcessFromCMAQ."""
    from PseudoNetCDF import PseudoNetCDFFile
    mrg = self.mrg = PseudoNetCDFFile()
    mrg.createDimension('TSTEP', 9)
    # NOTE(review): each variable's internal name ('EMIS'/'CHEM') differs
    # from its dict key; CHEM_NO (unlike the others) carries no units
    # keyword; and ('TSTEP') is a bare string, not a 1-tuple -- presumably
    # deliberate fixture quirks, but confirm before relying on them.
    mrg.variables = dict(EMIS_NO=PseudoNetCDFVariable(mrg, 'EMIS', 'f',
                                                      ('TSTEP'),
                                                      values=arange(9),
                                                      units='ppb'),
                         CHEM_NO=PseudoNetCDFVariable(mrg, 'CHEM', 'f',
                                                      ('TSTEP'),
                                                      values=arange(9)),
                         EMIS_NO2=PseudoNetCDFVariable(mrg, 'EMIS', 'f',
                                                       ('TSTEP'),
                                                       values=arange(9),
                                                       units='ppb'),
                         CHEM_NO2=PseudoNetCDFVariable(mrg, 'CHEM', 'f',
                                                       ('TSTEP'),
                                                       values=arange(9),
                                                       units='ppb'))
    self.processes = {}
    self.species = dict(
        NO=Species('NO'),
        NO2=Species('NO2'),
    )
    # Define the lumped species NOx = NO + NO2 via Species operator
    # overloading, evaluated in the species namespace.
    exec('NOx = NO + NO2', None, self.species)
    self.testProcessFromCMAQ()
def __add_variables(self):
    """Decode the wind-file memmap into TFLAG, U and V variables.

    Each time step is a header (``offset`` words) followed by 2 records
    per layer (U then V); each record is rows*cols values bracketed by
    Fortran unformatted record padding.
    """
    tsteps = len(self.dimensions['TSTEP'])
    lays = len(self.dimensions['LAY'])
    rows = len(self.dimensions['ROW'])
    cols = len(self.dimensions['COL'])
    offset = len(self.__time_hdr_fmts) + 2
    block = (rows * cols + 2) * 2 * lays
    # Classify each memmap word: 1=data, 2=record padding, 3=time header.
    out_idx = zeros(self.__memmap.shape, 'b')
    for t in range(tsteps):
        start = (t + 1) * offset + t * block + t * self.__dummy_length
        stop = start + block
        out_idx[start:stop].reshape(lays * 2, rows * cols + 2)[:, 1:-1] = 1
        out_idx[start:stop].reshape(lays * 2, rows * cols + 2)[:, [0, -1]] = 2
        out_idx[start - offset:start] = 3
    # Start/end padding of every record must agree.
    buffer = self.__memmap[out_idx == 2].reshape(tsteps, lays, 2, 2)
    if not (buffer[:, :, :, 0] == buffer[:, :, :, 1]).all():
        raise ValueError('Fortran unformatted record start and end padding do not match.')
    # Header words 2 and 1 hold the date and time respectively.
    date = self.__memmap[out_idx == 3].reshape(
        tsteps, (out_idx == 3).sum() // tsteps)[:, 2].view('>i')
    time = self.__memmap[out_idx == 3].reshape(
        tsteps, (out_idx == 3).sum() // tsteps)[:, 1]
    self.variables['TFLAG'] = ConvertCAMxTime(date, time, 2)
    # Within each layer-pair, record 0 is U and record 1 is V.
    self.variables['U'] = self.__decorator(
        'U', PseudoNetCDFVariable(
            self, 'U', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            values=self.__memmap[out_idx == 1].reshape(
                tsteps, lays, 2, rows, cols)[:, :, 0, :, :]))
    self.variables['V'] = self.__decorator(
        'V', PseudoNetCDFVariable(
            self, 'V', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            values=self.__memmap[out_idx == 1].reshape(
                tsteps, lays, 2, rows, cols)[:, :, 1, :, :]))
def __DEPTH__(self):
    """Derive layer DEPTH (m) from the HGHT (layer top height) variable."""
    val = CAMxHeightToDepth(self.variables['HGHT'])
    var = PseudoNetCDFVariable(
        self, 'DEPTH', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'), values=val)
    var.units = 'm'
    # BUG FIX: long_name/var_desc previously read 'RATE' (apparent
    # copy/paste from another derived variable); label this as DEPTH.
    var.long_name = 'DEPTH'.ljust(16)
    var.var_desc = 'DEPTH'.ljust(16)
    return var
def __PRECIP_RATE__(self):
    """Derive precipitation rate (mm/h) from PRECIP or hydrometeors."""
    variables = self.variables
    if "PRECIP" in variables.keys():
        water = variables["PRECIP"]
    else:
        # Older files carry rain/snow/graupel separately; combine them.
        water = variables["RAIN"] + variables["SNOW"] + variables["GRAUPEL"]
    rate = PseudoNetCDFVariable(
        self, "PRECIP_RATE", "f", ("TSTEP", "LAY", "ROW", "COL"),
        values=(water * 10) ** 1.27)
    rate.units = "mm/h"
    rate.long_name = "PRECIP_RATE".ljust(16)
    rate.var_desc = "PRECIP_RATE".ljust(16)
    return rate
def __set_var(self, key, vals_idx):
    """Register variable *key* built from memmap values at *vals_idx*."""
    shape = tuple(len(self.dimensions[dk])
                  for dk in ('TSTEP', 'LAY', 'ROW', 'COL'))
    var = PseudoNetCDFVariable(
        self, key, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
        values=self.__memmap[vals_idx].reshape(*shape))
    # Cloud optical depth is dimensionless; everything else is g/m**3.
    var.units = {'COD': 'None'}.get(key, 'g/m**3')
    var.long_name = key
    var.var_desc = key
    self.variables[key] = var
def __FCLOUD__(self):
    """Return FCLOUD: 1 where CLOUD >= 5, else 0."""
    cloudy = self.variables['CLOUD'] >= 5
    fcloud = PseudoNetCDFVariable(
        self, 'FCLOUD', 'i', ('TSTEP', 'LAY', 'ROW', 'COL'),
        values=array(cloudy, dtype='i'))
    fcloud.units = 'None'
    for attr in ('long_name', 'var_desc'):
        setattr(fcloud, attr, 'FCLOUD'.ljust(16))
    return fcloud
def __set_var(self, key, vals_idx):
    """Build and store a (TSTEP, LAY, ROW, COL) variable for *key*."""
    dims = ('TSTEP', 'LAY', 'ROW', 'COL')
    times, lays, rows, cols = [len(self.dimensions[d]) for d in dims]
    data = self.__memmap[vals_idx].reshape(times, lays, rows, cols)
    newvar = PseudoNetCDFVariable(self, key, 'f', dims, values=data)
    # COD (cloud optical depth) is dimensionless; the rest are g/m**3.
    newvar.units = 'None' if key == 'COD' else 'g/m**3'
    newvar.long_name = key
    newvar.var_desc = key
    self.variables[key] = newvar
def __PRECIP_RATE__(self):
    """Compute precipitation rate (mm/h) via the empirical (10*W)**1.27."""
    if 'PRECIP' not in self.variables.keys():
        # Combine the separate hydrometeor fields when PRECIP is absent.
        water = (self.variables['RAIN'] + self.variables['SNOW'] +
                 self.variables['GRAUPEL'])
    else:
        water = self.variables['PRECIP']
    outvar = PseudoNetCDFVariable(self, 'PRECIP_RATE', 'f',
                                  ('TSTEP', 'LAY', 'ROW', 'COL'),
                                  values=(water * 10) ** 1.27)
    outvar.units = 'mm/h'
    outvar.long_name = 'PRECIP_RATE'.ljust(16)
    outvar.var_desc = 'PRECIP_RATE'.ljust(16)
    return outvar
def relhum_ttd(t, td, percent=False, add=True):
    """Relative humidity from temperature and dew point.

    t - temperature in K
    td - dewpoint temperature in K
    percent - return percent instead of fraction

    Reference: John Dutton, Ceaseless Wind, 1976.
    """
    dimensions = getattr(t, 'dimensions',
                         getattr(td, 'dimensions', ('unknown',)))
    # Gas constant for water vapor: J/(kg K) converted to cal/(g K).
    gc = 461.5
    gc = gc / (1000. * 4.186)
    # Latent heat of vaporization (empirical; Dutton top of p. 273).
    lhv = 597.3 - 0.57 * (t - 273.)
    values = np.ma.exp((lhv / gc) * (1.0 / t - 1.0 / td))
    if percent:
        values *= 100.
    rh = PseudoNetCDFVariable(None, 'RH', 'f', dimensions, values=values)
    rh.long_name = "relative humidity"
    rh.units = "%" if percent else "fraction"
    rh.short_name = 'RH'
    return rh
def __var_get(self, key):
    """Parse the temperature memmap into SURFTEMP, AIRTEMP and TFLAG,
    then return the variable requested by *key*."""
    lays = len(self.dimensions['LAY'])
    times = len(self.dimensions['TSTEP'])
    rows = len(self.dimensions['ROW'])
    cols = len(self.dimensions['COL'])
    # Word classes used to label the memmap (0 = record padding).
    surf = 1
    air = 2
    time = 3
    date = 4
    out_idx = zeros(self.__memmap.shape,
                    dtype='b').reshape(times, lays + 1, rows * cols + 4)
    # Record 0 of each step is the surface field; records 1.. are layers.
    out_idx[:, 0, 3:-1] = surf
    out_idx[:, 1:, 3:-1] = air
    out_idx[:, :, 1] = time
    out_idx[:, :, 2] = date
    out_idx = out_idx.ravel()
    # Fortran record start/end paddings must agree.
    buf = self.__memmap[out_idx == 0].reshape((lays + 1) * times, 2)
    if not (buf[:, 0] == buf[:, 1]).all():
        raise ValueError("Buffer")
    v = self.variables['SURFTEMP'] = PseudoNetCDFVariable(
        self, 'SURFTEMP', 'f', ('TSTEP', 'ROW', 'COL'),
        values=self.__memmap[out_idx == 1].reshape(times, rows, cols))
    v.units = 'K'
    v.long_name = 'SURFTEMP'
    v.var_desc = 'SURFTEMP'
    v = self.variables['AIRTEMP'] = PseudoNetCDFVariable(
        self, 'AIRTEMP', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
        values=self.__memmap[out_idx == 2].reshape(times, lays, rows,
                                                   cols))
    v.units = 'K'
    v.long_name = 'AIRTEMP'
    v.var_desc = 'AIRTEMP'
    # One date/time per record; keep the first of each time step.
    date = self.__memmap[out_idx == date].view('>i')[0:None:lays + 1]
    time = self.__memmap[out_idx == time].view('>f')[0:None:lays + 1]
    # NOTE(review): wrapping ConvertCAMxTime's result as the values of a
    # float-typed ('f') TFLAG differs from sibling readers, which assign
    # the converted variable directly -- confirm this is intended.
    self.variables['TFLAG'] = PseudoNetCDFVariable(
        self, 'TFLAG', 'f', ('TSTEP', 'VAR', 'DATE-TIME'),
        values=ConvertCAMxTime(date, time, 2))
    return self.variables[key]
def __var_get(self, key):
    """Parse the height/pressure memmap into HGHT, PRES and TFLAG,
    then return the variable requested by *key*."""
    lays = len(self.dimensions['LAY'])
    times = len(self.dimensions['TSTEP'])
    rows = len(self.dimensions['ROW'])
    cols = len(self.dimensions['COL'])
    # Word classes used to label the memmap (0 = record padding).
    hght = 1
    pres = 2
    time = 3
    date = 4
    out_idx = zeros(self.__memmap.shape,
                    dtype='b').reshape(times, lays, 2, rows * cols + 4)
    # Each layer carries two records: heights first, then pressures.
    out_idx[:, :, 0, 3:-1] = hght
    out_idx[:, :, 1, 3:-1] = pres
    out_idx[:, :, :, 1] = time
    out_idx[:, :, :, 2] = date
    out_idx = out_idx.ravel()
    # Fortran record start/end paddings must agree.
    buf = self.__memmap[out_idx == 0].reshape(lays * 2 * times, 2)
    if not (buf[:, 0] == buf[:, 1]).all():
        raise ValueError("Buffer")
    v = self.variables['HGHT'] = PseudoNetCDFVariable(
        self, 'HGHT', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
        values=self.__memmap[out_idx == 1].reshape(times, lays, rows,
                                                   cols))
    v.units = 'm'
    v.long_name = 'HGHT'.ljust(16)
    v.var_desc = 'Top Height'
    v = self.variables['PRES'] = PseudoNetCDFVariable(
        self, 'PRES', 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
        values=self.__memmap[out_idx == 2].reshape(times, lays, rows,
                                                   cols))
    v.units = 'hPA'
    v.long_name = 'PRES'.ljust(16)
    v.var_desc = 'Pressure at center'
    # One date/time word per record; take one per time step (stride is
    # 2 records per layer).
    self.variables['TFLAG'] = ConvertCAMxTime(
        self.__memmap[out_idx == 4][slice(None, None,
                                          len(self.dimensions['LAY']) *
                                          2)].view('>i'),
        self.__memmap[out_idx == 3][slice(None, None,
                                          len(self.dimensions['LAY']) * 2)],
        len(self.dimensions['VAR']))
    return self.variables[key]
def __getitem__(self, key):
    """Index by Species (summed members), data key, or array index.

    A Species key sums its member arrays; a string key returns that
    array; any other key is treated as an index applied to every member
    array and returns a new Process.
    """
    if isinstance(key, Species):
        values = array([self.data[spc]
                        for spc in key.spc_dict.keys()]).sum(0)
    elif key in self.data.keys():
        values = self.data[key]
    else:
        # BUG FIX: dict.iteritems() was removed in Python 3; use items().
        new_data = dict([(k, v[key]) for k, v in self.data.items()])
        return Process(self.name, self.__units, **new_data)
    return PseudoNetCDFVariable(self, self.name, values.dtype.char,
                                ('TSTEP',), values=values,
                                units=self.__units)
def __time_stks(self):
    """Parse the hourly section of a point-source file: per-hour date
    blocks, stack-count headers, stack properties and emissions."""
    ep = self.__endianprefix
    i = offset = 0
    nspcs = len(self.__spc_names)
    nstks = len(self.dimensions['NSTK'])
    # Words per hourly sub-block (counts include Fortran record padding).
    date_block_size = 6
    stk_block_size = 4
    stk_props_size = 2 + nstks * 5
    emiss_block_size = nspcs * (nstks + 13)
    hour_block_size = date_block_size + stk_block_size + stk_props_size + emiss_block_size
    data = self.__memmap[self.__data_start:]
    data = data.reshape(data.size // hour_block_size, hour_block_size)
    ntimes = data.shape[0]
    self.createDimension('TSTEP', ntimes)
    self.createDimension('DATE-TIME', 2)
    start = 0
    end = date_block_size
    date_times = data[:, start:end]
    # Columns 1/3 are begin/end dates; 2/4 are begin/end times.
    dates = date_times[:, [1, 3]].view(ep + 'i')
    times = date_times[:, [2, 4]]
    start = end
    end = start + stk_block_size
    nstk_hdr = data[:, start:end].view(ep + 'i')
    if not (nstks == nstk_hdr[:, 2:3]).all():
        raise ValueError("Number of stacks varies with time")
    start = end
    end = start + stk_props_size
    # Strip record padding; 5 properties per stack per hour.
    self.__hourly_stk_props = data[:, start:end][:, 1:-1].reshape(
        ntimes, nstks, 5)
    start = end
    end = start + emiss_block_size
    if not end == data.shape[1]:
        raise ValueError("Incorrect shape")
    # Each species row: 13 header words then per-stack emissions.
    self.__emiss_data = data[:, start:].reshape(ntimes, nspcs,
                                                13 + nstks)[:, :, 12:-1]
    bdates = dates[:, 0]
    btimes = times[:, 0]
    edates = dates[:, 1]
    etimes = times[:, 1]
    self.NSTEPS = ntimes
    self.createDimension('VAR', len(self.__spc_names) + 3)
    self.variables['TFLAG'] = ConvertCAMxTime(bdates, btimes,
                                              len(self.dimensions['VAR']))
    self.variables['ETFLAG'] = ConvertCAMxTime(edates, etimes,
                                               len(self.dimensions['VAR']))
    v = self.variables['NSTKS'] = PseudoNetCDFVariable(self, 'NSTKS', 'i',
                                                       ('TSTEP', ),
                                                       values=array(
                                                           nstk_hdr[:, 2]))
    v.units = '#'.ljust(16)
    v.long_name = 'NSTKS'.ljust(16)
    v.var_desc = v.long_name
def __variables(self, k):
    """Return variable *k*, time-centered and converted to target units."""
    target_unit = self.__outunit.get(k, None)
    source = self.__file.variables[k]
    if target_unit is None:
        # No override configured: keep the source variable's units.
        target_unit = source.units
    centered = PseudoNetCDFVariable(self, k, source.typecode(),
                                    source.dimensions,
                                    values=CenterTime(source))
    decorated = self.__decorator(source, centered)
    return PseudoNetCDFVariableConvertUnit(decorated, target_unit)
def __variables(self, proc_spc):
    """Create/return the variable for *proc_spc* ('TFLAG' or 'PROC_SPC')."""
    if proc_spc == 'TFLAG':
        # NOTE(review): the two decode expressions index spcnames
        # differently ([0][1] vs [0] then [1]); presumably both resolve
        # to the first species' name field -- confirm against file layout.
        time = self.variables['TIME_%s' %
                              char.decode(self.spcnames)[0][1].strip()]
        date = self.variables['DATE_%s' %
                              char.decode(self.spcnames[0])[1].strip()]
        # BUG FIX: the variable was created with the literal string
        # 'proc_spc' as its name; it should be named 'TFLAG'.
        self.variables['TFLAG'] = PseudoNetCDFVariable(
            self, 'TFLAG', 'i', ('TSTEP', 'VAR', 'DATE-TIME'),
            values=ConvertCAMxTime(date[:, 0, 0, 0], time[:, 0, 0, 0],
                                   len(self.dimensions['VAR'])))
        return self.variables['TFLAG']
    # Rebuild the variable cache for the requested process/species pair.
    self.variables.clear()
    for k in self.proc_dict:
        proc = proc_spc[:len(k)]
        spc = proc_spc[len(k) + 1:]
        if proc == k and spc.ljust(10) in char.decode(
                self.spcnames['SPECIES']).tolist():
            spcprocs = self.__readalltime(spc)
            for p, plong in self.proc_dict.items():
                var_name = p + '_' + spc
                # IPR units are consistent with 'IPR'
                if p == 'UCNV':
                    units = 'm**3/mol'
                elif p == 'AVOL':
                    units = 'm**3'
                else:
                    units = get_uamiv_units('IPR', spc)
                self.variables[var_name] = PseudoNetCDFVariable(
                    self, var_name, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
                    values=spcprocs[p], units=units,
                    var_desc=(var_name).ljust(16),
                    long_name=(var_name).ljust(16))
            del spcprocs
            return self.variables[proc_spc]
    raise KeyError("Bad!")
def __variables(self, k):
    """Wrap the memmapped instantaneous field *k* as a variable."""
    dims = ('TSTEP', 'LAY', 'ROW', 'COL')
    data = self.__memmap__[self.CURRENT_GRID][k]['DATA'][:, :, :, :]
    # 'INSTANT ' (trailing space) is the file-type key used for units.
    unit = get_uamiv_units('INSTANT ', k)
    return PseudoNetCDFVariable(self, k, 'f', dims, values=data,
                                units=unit)
def __variables(self, pk, rxn):
    """Return TFLAG or the gridded rate variable for reaction *rxn*."""
    memmap = self.__memmaps[pk]
    if rxn == 'TFLAG':
        # Dates/times do not vary over space; sample one cell.
        corner = memmap[:, 0, 0, 0]
        return ConvertCAMxTime(corner['DATE'], corner['TIME'],
                               len(self.groups[pk].dimensions['VAR']))
    # Reorder the memmap axes into (TSTEP, LAY, ROW, COL).
    vals = memmap[rxn].swapaxes(1, 3).swapaxes(2, 3)
    return self.__decorator(
        rxn, PseudoNetCDFVariable(self, rxn, 'f',
                                  ('TSTEP', 'LAY', 'ROW', 'COL'),
                                  values=vals))
def wswd_uv(u, v, return_radians=False):
    """
    Arguments:
        u - array of u-component winds
        v - array of v-component winds
        return_radians - convert direction to radians

    Returns:
        ws, wd - wind speed and direction (direction the wind comes FROM,
                 normalized to [0, 360) degrees)

    https://www.eol.ucar.edu/projects/ceop/dm/documents/refdata_report/eqns.html
    """
    dimensions = getattr(u, 'dimensions',
                         getattr(v, 'dimensions', ('unknown',)))
    units = getattr(u, 'units', getattr(v, 'units', 'unknown'))
    wind_speed = np.sqrt(u**2 + v**2)
    # BUG FIX: arctan(u/v) divided by zero when v == 0 and returned
    # negative directions for the (u > 0, v < 0) quadrant; arctan2
    # handles all quadrants, and the result is normalized to [0, 360).
    wind_direction = (np.arctan2(u, v) * 180. / np.pi + 180.) % 360.
    if return_radians:
        wind_direction = np.radians(wind_direction)
    ws = PseudoNetCDFVariable(None, 'WS', wind_speed.dtype.char,
                              dimensions, values=wind_speed, units=units,
                              short_name='WS')
    wd = PseudoNetCDFVariable(None, 'WD', wind_speed.dtype.char,
                              dimensions, values=wind_direction,
                              units=('radians' if return_radians
                                     else 'degrees'),
                              short_name='WD')
    return ws, wd
def loadVars(self, start, n, pagrid=0):
    """Load up to *n* reaction-rate variables (RXN_%02d) into
    self.variables for process-analysis grid *pagrid*.

    Reads one record per (time, cell) from the rate file and scatters
    the requested reactions into (TSTEP, LAY, ROW, COL) arrays, sanity
    checking the cell ordering and timestamps of every record.
    """
    domain = self.padomains[pagrid]
    istart = domain['istart']
    iend = domain['iend']
    jstart = domain['jstart']
    jend = domain['jend']
    kstart = domain['blay']
    kend = domain['tlay']
    variables = self.variables
    nk = kend + 1 - kstart
    nj = jend + 1 - jstart
    ni = iend + 1 - istart
    nrec = nk * ni * nj
    temp = zeros((nrec, self.NRXNS), 'f')
    # FIX: look dimension lengths up directly instead of via eval().
    shape = (self.NSTEPS, ) + tuple(
        len(self.dimensions[dk]) for dk in ('LAY', 'ROW', 'COL'))
    variables.clear()
    # Clamp [start, end) to the valid reaction range, keeping n items.
    end = min(start + n, self.NRXNS + 1)
    start = max(1, end - n)
    for rxn in range(start, end):
        key = 'RXN_%02d' % rxn
        variables[key] = PseudoNetCDFVariable(
            self, key, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            values=zeros(shape, 'f'), units='ppm/hr',
            # BUG FIX: keyword was misspelled var_desk.
            var_desc=key.ljust(16), long_name=key.ljust(16))
    # BUG FIX: the seek previously hard-coded pagrid=0, ignoring the
    # pagrid argument (default behavior is unchanged).
    self._seek(pagrid=pagrid, i=istart, j=jstart, k=kstart)
    for ti, (d, t) in enumerate(self.timerange()):
        record = fromfile(self._rffile, dtype=self._record_dtype,
                          count=nrec)
        temp[:] = record['IRRS']
        date = record['DATE']
        time = record['TIME']
        id = record['I']
        jd = record['J']
        kd = record['K']
        # Records must arrive in the expected cell order and hour.
        assert ((id == arange(istart, iend + 1)[None, :, None].repeat(
            nk, 2).repeat(nj, 0).ravel()).all())
        assert ((kd == arange(kstart, kend + 1)[None, None, :].repeat(
            ni, 1).repeat(nj, 0).ravel()).all())
        assert (((jd == arange(jstart, jend + 1).repeat(ni * nk,
                                                        0))).all())
        assert ((date == d).all() and (time == t).all())
        for rxn in range(start, end):
            variables['RXN_%02d' % rxn][ti, :, :, :] = \
                temp[:, rxn - 1].reshape(
                    nj, ni, nk).swapaxes(1, 2).swapaxes(0, 1)
def uv_wswd(ws, wd, isradians=False):
    """
    Arguments:
        ws - wind speed array
        wd - wind direction array (in degrees unless isradians is True)
        isradians - False if WD is in degrees

    Returns:
        U, V - u-component and v-component winds

    https://www.eol.ucar.edu/projects/ceop/dm/documents/refdata_report/eqns.html
    """
    # BUG FIX: previously read .dimensions from the not-yet-assigned
    # locals u and v (UnboundLocalError); use the inputs ws/wd.
    dimensions = getattr(ws, 'dimensions',
                         getattr(wd, 'dimensions', ('unknown',)))
    units = getattr(ws, 'units', 'unknown')
    if isradians:
        direction = wd
    else:
        direction = np.radians(wd)
    wind_speed = ws
    # Meteorological convention: direction is where the wind comes FROM.
    U = -np.sin(direction) * wind_speed
    V = -np.cos(direction) * wind_speed
    u = PseudoNetCDFVariable(None, 'U', U.dtype.char, dimensions,
                             values=U, units=units, short_name='U')
    v = PseudoNetCDFVariable(None, 'V', V.dtype.char, dimensions,
                             values=V, units=units, short_name='V')
    return u, v
def __variables(self, k):
    """Reshape the memmap records into a (TSTEP, LAY, ROW, COL) variable."""
    dimlens = [len(self.dimensions[d])
               for d in ('TSTEP', 'LAY', 'ROW', 'COL')]
    records = self.__memmap.reshape(self.__records, self.__record_items)
    # Drop 3 leading and 1 trailing word of each record (record
    # bookkeeping -- presumed Fortran header/padding; confirm).
    body = records[:, 3:-1].reshape(*dimlens)
    return self.__decorator(
        k, PseudoNetCDFVariable(self, k, 'f',
                                ('TSTEP', 'LAY', 'ROW', 'COL'),
                                values=body))
def __add_variables(self):
    """Populate TFLAG plus unit-converted, cell-centered U and V."""
    tflag = self.createVariable('TFLAG', 'i',
                                ('TSTEP', 'VAR', 'DATE-TIME'), keep=True)
    tflag[:] = self.__windfile.variables['TFLAG'][self.__timeslice]
    tflag.long_name = 'Time flag'
    tflag.units = 'DATE-TIME'
    stagger = self.__force_stagger or self.__windfile.LSTAGGER != 0
    if self.__force_stagger and self.__windfile.LSTAGGER == 0:
        warn('Cell centered values are being averaged as though staggered' +
             'Could just be pre v4.3 file that was actually staggered')
    for component in ('U', 'V'):
        # Pick the destaggering/centering routine for this component.
        if stagger:
            preproc = {'U': CenterCAMxU, 'V': CenterCAMxV}[component]
        else:
            preproc = CenterTime
        raw = self.__windfile.variables[component]
        centered = PseudoNetCDFVariable(
            self, component, 'f', ('TSTEP', 'LAY', 'ROW', 'COL'),
            values=preproc(raw))
        centered.units = raw.units
        centered.long_name = component.ljust(16)
        centered.var_desc = (component + ' at center').ljust(16)
        self.variables[component] = PseudoNetCDFVariableConvertUnit(
            centered, self.__outunit)
def ConvertCAMxTime(date, time, nvars):
    """Convert CAMx date/time arrays into an IOAPI-like TFLAG variable."""
    class temp:
        pass
    holder = temp()
    holder.dimensions = {'TSTEP': date.shape[0], 'VAR': nvars,
                         'DATE-TIME': 2}
    dt = array([date, time], dtype='i').swapaxes(0, 1)
    if len(dt.shape) == 2:
        dt = dt[:, newaxis, :]
    date = dt[:, :, 0]
    # Two-digit years become 7-digit YYYYDDD (70000 splits the century).
    date += 2000000 if (date < 70000).any() else 1900000
    time = dt[:, :, 1]
    # Promote HH or HHMM times to HHMMSS.
    while not (time == 0).all() and time.max() < 10000:
        time *= 100
    out = PseudoNetCDFVariable(holder, 'TFLAG', 'i',
                               ('TSTEP', 'VAR', 'DATE-TIME'),
                               values=dt[:, [0], :].repeat(nvars, 1))
    out.units = 'DATE-TIME'.ljust(16)
    out.long_name = 'TFLAG'.ljust(16)
    out.var_desc = out.long_name
    return out
def wmrq_ptd(p, td, dry = False, kgkg = False):
    """
    p - pressure in Pa
    td - temperature in degrees celcius
    dry - return results in mixing ratio or specific humidity in dry air
    kgkg - return results in kg/kg instead of g/kg
    """
    # NOTE(review): despite the docstring, td is converted with T0=273.15
    # below, which implies Kelvin input; and kgkg=True multiplies by 1000
    # and labels g/kg (opposite of its description). Behavior preserved;
    # confirm intended semantics with callers.
    dimensions = getattr(p, 'dimensions', getattr(td, 'dimensions',
                                                  ('unknown',)))
    p = np.asarray(p)
    td = np.asarray(td)
    # ncl: q = mixhum_ptd (p,td,option) [ q=g/kg ]
    # p - PA
    # td - K
    # local
    # INTEGER N
    # DOUBLE PRECISION T0, PA2MB
    # DATA T0 /273.15d0/
    # DATA PA2MB /0.01d0 /
    T0 = 273.15
    PA2MB = .01
    # mixing ratio (kg/kg)
    # the function wants hPA (mb) and degrees centigrade
    WMR = wmr_skewt_pt(p*PA2MB, td - T0) * 0.001
    # if iswit=2 calculate specific humidity (kg/kg)
    if dry:
        name = 'W'
        long_name = 'mass of water per mass of dry air'
    else:
        name = 'Q'
        long_name = 'mass of water per mass of air (dry+wet)'
        WMR = WMR/(WMR + 1.0)
    # if iswit < 0 then return g/kg
    if kgkg:
        WMR *= 1000.
        units = 'g/kg'
    else:
        units = 'kg/kg'
    wmr = PseudoNetCDFVariable(None, name, 'f', dimensions, values = WMR,
                               units = units, long_name = long_name)
    wmr.short_name = name
    return wmr
def __getitem__(self, key):
    """Index by Species (stoichiometric sum), data key, or array index."""
    units = self.__units
    if isinstance(key, Species):
        # Stoichiometry-weighted sum over the member species.
        values = array([
            self.data[spc] * prop['stoic']
            for spc, prop in key.spc_dict.items()
        ]).sum(0)
        units = dict([(k, self.__units[k]) for k in key.spc_dict.keys()])
    elif key in self.data.keys():
        values = self.data[key]
        units = {key: self.__units[key]}
    else:
        # BUG FIX: dict.iteritems() does not exist on Python 3; also
        # removed the dead units = {key: self.__units[key]} assignment,
        # which indexed the units dict with a slice/index key and was
        # discarded by the return anyway.
        new_data = dict([(k, v[key]) for k, v in self.data.items()])
        return Process(self.name, self.__units, **new_data)
    return PseudoNetCDFVariable(self, self.name, values.dtype.char,
                                ('TSTEP', ),
                                values=values,
                                units=units)
def wmr_ptd(p, td, gkg=False):
    """
    Inputs: p = surface pressure in mb; td = dew point in deg C.
    Calculates e = vapor pressure in mb and returns Q = specific
    humidity in kg/kg (or g/kg when gkg is True).
    """
    # FIX: fall back to a 1-tuple (matching the sibling met functions)
    # rather than the bare string 'unknown', which is not a valid
    # dimensions tuple.
    dimensions = getattr(p, 'dimensions',
                         getattr(td, 'dimensions', ('unknown',)))
    # Vapor pressure (mb) via the Bolton-style exponential fit.
    e = 6.112 * np.ma.exp((17.67 * td) / (td + 243.5))
    q = (0.622 * e) / (p - (0.378 * e))
    if gkg:
        q *= 1000.
        units = 'g/kg'
    else:
        units = 'kg/kg'
    Q = PseudoNetCDFVariable(None, 'Q', q.dtype.char, dimensions,
                             units=units,
                             long_name='specific humidity in kg/kg',
                             short_name='Q', values=q)
    return Q
def childvariables(self, k):
    """Find variable *k* in the child files, normalizing TFLAG to this
    file's VAR count and relabeling single-layer LAY as SURFLAY."""
    for f in self.__files:
        if k in f.variables.keys():
            v = f.variables[k]
            if k == 'TFLAG':
                # Rebuild TFLAG with this file's variable count.
                v = PseudoNetCDFVariable(
                    self, 'TFLAG', 'i', v.dimensions,
                    values=v[:][:, [0], :].repeat(
                        len(self.dimensions['VAR']), 1))
                v.long_name = 'TFLAG'.ljust(16)
                v.var_desc = 'TFLAG'.ljust(16)
                v.units = 'DATE-TIME'
            # BUG FIX: the original tested len(k.shape) on the string
            # key, which has no .shape; the variable's shape was meant.
            if k == 'LAY' and k in self.dimensions.keys() and \
                    len(v.shape) > 1:
                if v.shape[1] == 1:
                    dims = list(v.dimensions)
                    dims[1] = 'SURFLAY'
                    v.dimensions = tuple(dims)
            return v
def __init__(self, path, keysubs={'/': '_'}, encoding='utf-8',
             default_llod_flag=-8888, default_llod_value='N/A',
             default_ulod_flag=-7777, default_ulod_value='N/A'):
    """
    Arguments:
        self - implied input (not supplied in call)
        path - path to file
        keysubs - dictionary of characters to remove from variable keys
                  and their replacements
        encoding - file encoding (utf-8, latin1, cp1252, etc.)
        default_llod_flag - flag value for lower limit of detections if
                            not specified
        default_llod_value - default value to use for replacement of
                             llod_flag
        default_ulod_flag - flag value for upper limit of detections if
                            not specified
        default_ulod_value - default value to use for replacement of
                             ulod_flag
    Returns:
        out - PseudoNetCDFFile interface to data in file.
    """
    # NOTE(review): keysubs is a mutable default argument -- shared
    # across calls if ever mutated; left as-is to preserve the interface.
    lastattr = None
    PseudoNetCDFFile.__init__(self)
    f = openf(path, 'rU', encoding=encoding)
    missing = []
    units = []
    l = f.readline()
    # ICARTT files are comma- or whitespace-delimited.
    if ',' in l:
        delim = ','
    else:
        delim = None
    split = lambda s: [s_.strip() for s_ in s.split(delim)]
    # First line must end with the FFI code 1001.
    if split(l)[-1] != '1001':
        raise TypeError(
            "File is the wrong format. Expected 1001; got %s" %
            (split(l)[-1], ))
    n, self.fmt = split(l)
    n_user_comments = 0
    n_special_comments = 0
    self.n_header_lines = int(n)
    try:
        for li in range(self.n_header_lines - 1):
            li += 2
            l = f.readline()
            # Landmark line numbers shift as missing values and comment
            # counts are discovered, so they are recomputed every pass.
            LAST_VAR_DESC_LINE = 12 + len(missing)
            SPECIAL_COMMENT_COUNT_LINE = LAST_VAR_DESC_LINE + 1
            LAST_SPECIAL_COMMENT_LINE = (SPECIAL_COMMENT_COUNT_LINE +
                                         n_special_comments)
            USER_COMMENT_COUNT_LINE = 12 + len(
                missing) + 2 + n_special_comments
            # PI_LINE, ORG_LINE, etc. are module-level line-number
            # constants defined elsewhere in this module.
            if li == PI_LINE:
                self.PI_NAME = l.strip()
            elif li == ORG_LINE:
                self.ORGANIZATION_NAME = l.strip()
            elif li == PLAT_LINE:
                self.SOURCE_DESCRIPTION = l.strip()
            elif li == MISSION_LINE:
                self.MISSION_NAME = l.strip()
            elif li == VOL_LINE:
                self.VOLUME_INFO = ', '.join(split(l))
            elif li == DATE_LINE:
                # Start date and revision date share one line.
                l = l.replace(',', ' ').replace('-', ' ').replace(
                    '  ', ' ').split()
                SDATE = ", ".join(l[:3])
                WDATE = ", ".join(l[3:])
                self.SDATE = SDATE
                self.WDATE = WDATE
                self._SDATE = datetime.strptime(SDATE, '%Y, %m, %d')
                self._WDATE = datetime.strptime(WDATE, '%Y, %m, %d')
            elif li == TIME_INT_LINE:
                self.TIME_INTERVAL = l.strip()
            elif li == UNIT_LINE:
                # Independent-variable name/unit line.
                units.append(l.replace('\n', '').replace('\r', '').strip())
                self.INDEPENDENT_VARIABLE = units[-1]
            elif li == SCALE_LINE:
                scales = [eval(i) for i in split(l)]
                if set([float(s) for s in scales]) != set([1.]):
                    raise ValueError(
                        "Unsupported: scaling is unsupported. "
                        "data is scaled by %s" % (str(scales), ))
            elif li == MISSING_LINE:
                missing = [eval(i) for i in split(l)]
            elif li > MISSING_LINE and li <= LAST_VAR_DESC_LINE:
                # One dependent-variable "name, unit" description per line.
                nameunit = l.replace('\n', '').split(',')
                name = nameunit[0].strip()
                if len(nameunit) > 1:
                    units.append(nameunit[1].strip())
                elif re.compile('(.*)\((.*)\)').match(nameunit[0]):
                    # Fall back to "name (unit)" style.
                    desc_groups = re.compile('(.*)\((.*)\).*').match(
                        nameunit[0]).groups()
                    name = desc_groups[0].strip()
                    units.append(desc_groups[1].strip())
                elif '_' in name:
                    # Fall back to "name_unit" style.
                    units.append(name.split('_')[1].strip())
                else:
                    warn('Could not find unit in string: "%s"' % l)
                    units.append(name.strip())
            elif li == SPECIAL_COMMENT_COUNT_LINE:
                n_special_comments = int(l.replace('\n', ''))
            elif (li > SPECIAL_COMMENT_COUNT_LINE and
                  li <= LAST_SPECIAL_COMMENT_LINE):
                # "KEY: value" attributes; indented lines continue the
                # previous attribute.
                colon_pos = l.find(':')
                if l[:1] == ' ':
                    k = lastattr
                    v = getattr(self, k, '') + l
                else:
                    k = l[:colon_pos].strip()
                    v = l[colon_pos + 1:].strip()
                setattr(self, k, v)
                lastattr = k
            elif li == USER_COMMENT_COUNT_LINE:
                lastattr = None
                n_user_comments = int(l.replace('\n', ''))
            elif (li > USER_COMMENT_COUNT_LINE and
                  li < self.n_header_lines):
                colon_pos = l.find(':')
                if l[:1] == ' ':
                    k = lastattr
                    v = getattr(self, k, '') + l
                else:
                    k = l[:colon_pos].strip()
                    v = l[colon_pos + 1:].strip()
                setattr(self, k, v)
                lastattr = k
            elif li == self.n_header_lines:
                # Final header line: the column (variable) names.
                variables = l.replace(',', ' ').replace('  ', ' ').split()
                for oc, nc in keysubs.items():
                    variables = [vn.replace(oc, nc) for vn in variables]
                self.TFLAG = variables[0]
    except Exception as e:
        raise SyntaxError("Error parsing icartt file %s: %s" %
                          (path, repr(e)))
    # The independent variable has no missing/scale entry; prepend one.
    missing = missing[:1] + missing
    scales = [1.] + scales
    if hasattr(self, 'LLOD_FLAG'):
        llod_values = loddelim.sub('\n', self.LLOD_VALUE).split()
        if len(llod_values) == 1:
            llod_values *= len(variables)
        else:
            llod_values = ['N/A'] + llod_values
        assert len(llod_values) == len(variables)
        llod_values = [get_lodval(llod_val) for llod_val in llod_values]
        llod_flags = len(llod_values) * [self.LLOD_FLAG]
        llod_flags = [get_lodval(llod_flag) for llod_flag in llod_flags]
    else:
        llod_flags = [default_llod_flag] * len(scales)
        llod_values = [default_llod_value] * len(scales)
    if hasattr(self, 'ULOD_FLAG'):
        ulod_values = loddelim.sub('\n', self.ULOD_VALUE).split()
        if len(ulod_values) == 1:
            ulod_values *= len(variables)
        else:
            ulod_values = ['N/A'] + ulod_values
        assert len(ulod_values) == len(variables)
        ulod_values = [get_lodval(ulod_val) for ulod_val in ulod_values]
        ulod_flags = len(ulod_values) * [self.ULOD_FLAG]
        ulod_flags = [get_lodval(ulod_flag) for ulod_flag in ulod_flags]
    else:
        ulod_flags = [default_ulod_flag] * len(scales)
        ulod_values = [default_ulod_value] * len(scales)
    # Remaining lines are the data table; trim trailing blank lines.
    data = f.read()
    datalines = data.split('\n')
    ndatalines = len(datalines)
    while datalines[-1] in ('', ' ', '\r'):
        ndatalines -= 1
        datalines.pop(-1)
    data = genfromtxt(StringIO('\n'.join(datalines).encode()),
                      delimiter=delim, dtype='d')
    data = data.reshape(ndatalines, len(variables))
    data = data.swapaxes(0, 1)
    self.createDimension('POINTS', ndatalines)
    for var, scale, miss, unit, dat, llod_flag, llod_val, ulod_flag, \
            ulod_val in zip(variables, scales, missing, units, data,
                            llod_flags, llod_values, ulod_flags,
                            ulod_values):
        # Mask missing values and attach per-variable LOD metadata.
        vals = MaskedArray(dat, mask=dat == miss, fill_value=miss)
        tmpvar = self.variables[var] = PseudoNetCDFVariable(
            self, var, 'd', ('POINTS', ), values=vals)
        tmpvar.units = unit
        tmpvar.standard_name = var
        tmpvar.missing_value = miss
        tmpvar.fill_value = miss
        tmpvar.scale = scale
        if hasattr(self, 'LLOD_FLAG'):
            tmpvar.llod_flag = llod_flag
            tmpvar.llod_value = llod_val
        if hasattr(self, 'ULOD_FLAG'):
            tmpvar.ulod_flag = ulod_flag
            tmpvar.ulod_value = ulod_val
    # Datetime per point: start date plus the independent time column.
    self._date_objs = self._SDATE + vectorize(lambda s: timedelta(
        seconds=int(s), microseconds=(s - int(s)) * 1.E6))(
        self.variables[self.TFLAG]).view(type=ndarray)
def __variables(self, k):
    """Return stack variable *k*: cached time flags, static stack
    properties, hourly stack properties, or per-stack emissions."""
    stk_prop_units = {'XSTK': 'm', 'YSTK': 'm', 'HSTK': 'm',
                      'DSTK': 'm', 'TSTK': 'K', 'VSTK': 'm/h'}
    hourly_types = {'IONE': 'i', 'ITWO': 'i', 'KCELL': 'i',
                    'FLOW': 'f', 'PLMHT': 'f'}
    hourly_units = {'IONE': '#', 'ITWO': '#', 'KCELL': '#',
                    'FLOW': 'm**3/hr', 'PLMHT': 'm'}
    if k in ('TFLAG', 'ETFLAG', 'NSTKS'):
        # Built when the file was opened; just hand it back.
        return self.variables[k]
    if k in stk_prop_units:
        out = PseudoNetCDFVariable(self, k, 'f', ('NSTK',),
                                   values=self.__stk_props[k].ravel())
        out.units = stk_prop_units[k]
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        return out
    if k in hourly_types:
        dtype = hourly_types[k]
        out = self.createVariable(k, dtype, ('TSTEP', 'NSTK'))
        out.units = hourly_units[k]
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        col = ['IONE', 'ITWO', 'KCELL', 'FLOW', 'PLMHT'].index(k)
        out[:] = self.__hourly_stk_props[:, :, col].view('>' + dtype)
        return out
    if k in self.__spc_names:
        out = PseudoNetCDFVariable(
            self, k, 'f', ('TSTEP', 'NSTK'),
            values=self.__emiss_data[:, self.__getspcidx(k), :])
        out.units = 'mole/hr'.ljust(16)
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        return out
    raise KeyError("Unknown key %s" % k)
def __variables(self, k):
    """Look up stack variable *k* (flags, static properties, hourly
    properties, or species emissions)."""
    if k in ('TFLAG', 'ETFLAG', 'NSTKS'):
        return self.variables[k]
    static_units = {'XSTK': 'm', 'YSTK': 'm', 'HSTK': 'm', 'DSTK': 'm',
                    'TSTK': 'K', 'VSTK': 'm/h'}
    if k in static_units:
        out = PseudoNetCDFVariable(self, k, 'f', ('NSTK',),
                                   values=self.__stk_props[k].ravel())
        out.units = static_units[k]
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        return out
    hourly = ['IONE', 'ITWO', 'KCELL', 'FLOW', 'PLMHT']
    if k in hourly:
        dtype = 'i' if k in ('IONE', 'ITWO', 'KCELL') else 'f'
        out = self.createVariable(k, dtype, ('TSTEP', 'NSTK'))
        out.units = {'IONE': '#', 'ITWO': '#', 'KCELL': '#',
                     'FLOW': 'm**3/hr', 'PLMHT': 'm'}[k]
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        out[:] = self.__hourly_stk_props[:, :, hourly.index(k)].view(
            '>' + dtype)
        return out
    if k in self.__spc_names:
        out = PseudoNetCDFVariable(
            self, k, 'f', ('TSTEP', 'NSTK'),
            values=self.__emiss_data[:, self.__getspcidx(k), :])
        out.units = 'mole/hr'.ljust(16)
        out.long_name = k.ljust(16)
        out.var_desc = k.ljust(16)
        return out
    raise KeyError("Unknown key %s" % k)
def getem(key):
    """Extract field *key* from the parsed GEOS block, optionally
    lumping the native vertical levels onto the reduced grid.

    Closes over data, nlay_in/nlay_in_stag, nlay/nlay_stag, reduced,
    _geos_units, lump and self from the enclosing scope.
    """
    thisblock = data[0]['data'][key]
    thisdata = thisblock['f6']
    # Fortran record start/end paddings must agree.
    assert((thisblock['f3'] == thisblock['f7']).all())
    # Infer dimensions from rank and the layer-axis length.
    if len(thisdata.shape) == 3:
        dims = ('time', 'latitude', 'longitude')
    elif thisdata.shape[1] == nlay_in:
        dims = ('time', 'layer', 'latitude', 'longitude')
    elif thisdata.shape[1] == nlay_in_stag:
        dims = ('time', 'layer_stag', 'latitude', 'longitude')
    else:
        raise ValueError('Wrong layers got %d not %d or %d' %
                         (thisdata.shape[1], nlay, nlay_stag))
    unit = _geos_units.get(key, '')
    if dims != ('time', 'latitude', 'longitude'):
        thisdatain = thisdata
        thisdata = np.zeros([len(self.dimensions[k]) for k in dims],
                            dtype=thisdata.dtype)
        if reduced:
            if self.gtype == 'GEOS-4-REDUCED':
                # ----------------------------------------------------
                # GEOS-4: Lump 55 levels into 30 levels, starting
                # above L=20. Lump levels in groups of 2, then 4.
                # (cf. Mat Evans)
                # ----------------------------------------------------
                lump_groups = [[0, ], [1, ], [2, ], [3, ], [4, ], [5, ],
                               [6, ], [7, ], [8, ], [9, ], [10, ],
                               [11, ], [12, ], [13, ], [14, ], [15, ],
                               [16, ], [17, ], [18, ]] + \
                    [[19, 20], [21, 22], [23, 24], [25, 26]] + \
                    [[27, 28, 29, 30], [31, 32, 33, 34],
                     [35, 36, 37, 38], [39, 40, 41, 42],
                     [43, 44, 45, 46], [47, 48, 49, 50],
                     [51, 52, 53, 54]]
            elif self.gtype == 'GEOS-5-REDUCED':
                # ----------------------------------------------------
                # GEOS-5/MERRA: Lump 72 levels into 47 levels,
                # starting above L=36. Lump levels in groups of 2,
                # then 4. (cf. Bob Yantosca)
                # ----------------------------------------------------
                lump_groups = [[0, ], [1, ], [2, ], [3, ], [4, ], [5, ],
                               [6, ], [7, ], [8, ], [9, ], [10, ],
                               [11, ], [12, ], [13, ], [14, ], [15, ],
                               [16, ], [17, ], [18, ], [19, ], [20, ],
                               [21, ], [22, ], [23, ], [24, ], [25, ],
                               [26, ], [27, ], [28, ], [29, ], [30, ],
                               [31, ], [32, ], [33, ], [34, ],
                               [35, ]] + \
                    [[36, 37], [38, 39], [40, 41], [42, 43]] + \
                    [[44, 45, 46, 47], [48, 49, 50, 51],
                     [52, 53, 54, 55], [56, 57, 58, 59],
                     [60, 61, 62, 63], [64, 65, 66, 67],
                     [68, 69, 70, 71]]
            else:
                # BUG FIX: '%' was an incomplete format spec ('%s').
                raise ValueError('Cannot reduce %s' % self.gtype)
            assert(len(lump_groups) == nlay)
            for li, lump_group in enumerate(lump_groups):
                if (len(lump_group) == 1 or dims[1] == 'layer_stag' or
                        key == 'PLE'):
                    thisdata[:, li] = thisdatain[:, lump_group[0]]
                elif dims[1] == 'layer':
                    # assumes lumping only happens above pure eta
                    # true for (GEOS4 and GEOS5)
                    thisdata[:, li] = lump(
                        thisdatain, self.Ap_NATIVE, lump_group)
                else:
                    raise ValueError('huh?')
            else:
                # for-else: after lumping, copy the topmost staggered
                # edge from the last native group.
                if dims[1] == 'layer_stag':
                    thisdata[:, li + 1] = thisdatain[:, lump_group[-1]]
        else:
            thisdata = thisdatain
    return PseudoNetCDFVariable(self, key, 'f', dims, values=thisdata,
                                units=unit, long_name=key.ljust(16))
def __variables(self, k):
    """Expose memmapped column *k* as a 1-D (TSTEP,) float variable."""
    label = k.ljust(16)
    return PseudoNetCDFVariable(self, k, 'f', ('TSTEP',),
                                values=self.__memmap__[k],
                                var_desc=label, long_name=label)