def movie(self, fieldname="salt", jd1=None, jd2=None, k=0):
    """Render one map frame per 3-hour step and save them as png files.

    Parameters
    ----------
    fieldname : str
        Field to load for each time step (passed to self.load).
    jd1, jd2 : float, optional
        Start/end dates as pylab datenums. Default to Sept 1-30, 2006.
        (Fix: these parameters were previously overwritten unconditionally.)
    k : int
        Vertical level index to plot.
    """
    # Lazily build the map projection and cache it on the instance.
    if not hasattr(self, 'mp'):
        self.mp = projmaps.Projmap(self.region)
        self.xll, self.yll = self.mp(self.llon, self.llat)
        # Closed polygon outlining the model domain corners.
        self.xl, self.yl = self.mp(
            [self.llon[0, 0], self.llon[0, -1], self.llon[-1, -1],
             self.llon[-1, 0], self.llon[0, 0]],
            [self.llat[0, 0], self.llat[0, -1], self.llat[-1, -1],
             self.llat[-1, 0], self.llat[0, 0]])
    if jd1 is None:
        jd1 = pl.date2num(dtm(2006, 9, 1))
    if jd2 is None:
        jd2 = pl.date2num(dtm(2006, 9, 30))
    for jd in np.arange(jd1, jd2, 0.125):  # 0.125 day = 3-hour steps
        self.load(fieldname, jd)
        pl.clf()
        self.mp.plot(self.xl, self.yl, '0.5')
        # NOTE(review): plots self.salt even when fieldname != "salt" --
        # confirm whether self.__dict__[fieldname] was intended.
        self.mp.pcolormesh(self.xll, self.yll,
                           miv(self.salt[k, :, :]), cmap=cm.Paired)
        pl.clim(15, 32)
        pl.colorbar(pad=0, aspect=40, orientation="horizontal", shrink=0.5)
        print(jd)
        pl.title(pl.num2date(jd).strftime("%Y-%m-%d %H:%M"))
        pl.savefig('figs/movies/salt_%i.png' % int(jd * 1000), dpi=100)
def generate_filename(self, fld='chl', fldtype="DAY", **kwargs):
    """Generate the level-3 product filename.

    Parameters
    ----------
    fld : str
        Product key into self.vc (e.g. 'chl').
    fldtype : str
        Averaging period: "DAY", "8D", "MC", "MO"/"mo", or "CU".
    **kwargs
        Forwarded to self._timeparams (yr/mn/dy/jd/yd...).

    Raises
    ------
    TypeError
        If fldtype is not one of the recognized averaging types.
    """
    if len(kwargs):
        self._timeparams(**kwargs)
    # Number of days in the current year (365 or 366).
    ydmax = (pl.date2num(dtm(self.yr, 12, 31)) -
             pl.date2num(dtm(self.yr, 1, 1))) + 1
    # Fix: the "MC" branch used to set datestr and then fall through a
    # second if/elif chain whose else raised TypeError; single chain now.
    if fldtype == "MC":
        self.add_mnclim()
        datestr = self.mc_datedict[self.mn]
    elif "mo" in fldtype.lower():
        self.add_filepreflist(fldtype="mo")
        datestr = self.mo_fileprefs[self.yr * 100 + self.mn]
    elif fldtype == "DAY":
        datestr = "%i%03i" % (self.yr, self.yd)
    elif fldtype == "8D":
        # 8-day composites: pick the window containing self.yd.
        yd1 = np.arange(1, 365, 8)
        yd2 = np.arange(8, 370, 8)
        yd2[-1] = ydmax
        pos = np.nonzero(self.yd >= yd1)[0].max()
        datestr = ("%i%03i%i%03i" %
                   (self.yr, yd1[pos], self.yr, yd2[pos]))
    elif fldtype == "CU":
        # Cumulative product: latest datestamp available on the server.
        self.a_cu_url_9km = 'MODISA/Mapped/Cumulative/4km/chlor/'
        datestr = max(self._retrieve_datestamps(self.a_cu_url_9km))
    else:
        raise TypeError("File average type not included")
    return ("%s%s.L3m_%s_%s_%s%s.nc" %
            (self.fp, datestr, fldtype, self.vc[fld][0],
             self.res[0], self.vc[fld][1]))
def movie(self):
    """Render daily global current-speed frames for 2005 and save pngs."""
    import matplotlib as mpl
    mpl.rcParams['axes.labelcolor'] = 'white'
    pl.close(1)
    pl.figure(1,(8,4.5),facecolor='k')
    miv = np.ma.masked_invalid  # mask NaNs so pcolormesh skips them
    figpref.current()
    jd0 = pl.date2num(dtm(2005,1,1))
    jd1 = pl.date2num(dtm(2005,12,31))
    mp = projmaps.Projmap('glob')
    x,y = mp(self.llon,self.llat)
    for t in np.arange(jd0,jd1):  # one frame per day
        print pl.num2date(t)
        self.load(t)
        pl.clf()
        pl.subplot(111,axisbg='k')
        # Current speed magnitude |(u, v)|, masked where invalid.
        mp.pcolormesh(x,y,
                      miv(np.sqrt(self.u**2 +self.v**2)),
                      cmap=cm.gist_heat)
        pl.clim(0,1.5)
        mp.nice()
        pl.title('%04i-%02i-%02i' % (pl.num2date(t).year,
                                     pl.num2date(t).month,
                                     pl.num2date(t).day), color='w')
        # NOTE(review): hard-coded user-specific output path.
        pl.savefig('/Users/bror/oscar/norm/%03i.png' % t,
                   bbox_inches='tight',facecolor='k',dpi=150)
def _timeparams(self, **kwargs):
    """Calculate time parameters from given values.

    Accepts any of: date (string), yr+yd, yr+mn[+dy], or jd; sets
    self.jd and then derives calendar attributes via _jd_to_dtm.
    Raises KeyError when no time parameter is given and no defaultjd
    exists; ValueError when the date is outside self.fulltvec.
    """
    # Copy all given kwargs onto the instance first.
    for key in kwargs.keys():
        self.__dict__[key] = kwargs[key]
    if "date" in kwargs:
        # Date string -> datenum; collapse to int when there is no time part.
        self.jd = pl.datestr2num(kwargs['date'])
        self.jd = int(self.jd) if self.jd == int(self.jd) else self.jd
    elif ('yd' in kwargs) & ('yr' in kwargs):
        if self.yd < 1:
            # Non-positive yearday wraps back into the previous year.
            self.yr = self.yr -1
            ydmax = (pl.date2num(dtm(self.yr, 12, 31)) -
                     pl.date2num(dtm(self.yr, 1, 1))) + 1
            self.yd = ydmax + self.yd
        self.jd = self.yd + pl.date2num(dtm(self.yr,1,1)) - 1
    elif ('yr' in kwargs) & ('mn' in kwargs):
        if not "dy" in kwargs:
            # Default to the first day of the month.
            kwargs["dy"] = 1
            setattr(self, "dy", 1)
        self.jd = pl.date2num(dtm(self.yr,self.mn,self.dy))
    elif not 'jd' in kwargs:
        if hasattr(self, 'defaultjd'):
            self.jd = self.defaultjd
        else:
            raise KeyError, "Time parameter missing"
    if hasattr(self,'hourlist'):
        # Snap the fractional day to the nearest allowed model hour.
        dd = self.jd-int(self.jd)
        ddlist = np.array(self.hourlist).astype(float)/24
        ddpos = np.argmin(np.abs(ddlist-dd))
        self.jd = int(self.jd) + ddlist[ddpos]
    if self.jd < self.fulltvec.min():
        raise ValueError, "Date before first available model date"
    if self.jd > self.fulltvec.max():
        raise ValueError, "Date after last available model date"
    self._jd_to_dtm()
def uvmat(self):
    """Count, per grid cell, how often the current speed falls in each of
    20 bins spanning 0-1.5 (m/s) over the days 2003-01-01..2009-12-31.

    Returns an int16 array of shape (20,) + llat.shape.
    """
    histo = np.zeros([20] + list(self.llat.shape)).astype(np.int16)
    day_first = pl.date2num(dtm(2003, 1, 1))
    day_last = pl.date2num(dtm(2009, 12, 31))
    edges = np.linspace(0, 1.5, 21)  # 21 edges -> 20 bins
    for jd in np.arange(day_first, day_last + 1):
        print(pl.num2date(jd))
        self.load(jd=jd)
        speed = np.sqrt(self.u ** 2 + self.v ** 2)
        for bin_idx, (lo, hi) in enumerate(zip(edges[:-1], edges[1:])):
            in_bin = (speed >= lo) & (speed < hi)
            histo[bin_idx, in_bin] += 1
    return histo
def add_jds(self):
    """Populate self.jds with the datenum of every remapped file in satdir.

    The date is decoded from the filename: characters 1-4 are the year and
    5-7 the (1-based) yearday.
    """
    remap_files = glob.glob(self.satdir + "/*remap")
    basenames = (path.split('/')[-1] for path in remap_files)
    self.jds = [
        pl.date2num(dtm(int(name[1:5]), 1, 1)) + int(name[5:8]) - 1
        for name in basenames
    ]
def _jd_to_dtm(self):
    """Derive yr/mn/dy/hr/min/sec and yearday attributes from self.jd."""
    date_obj = pl.num2date(self.jd)
    # Short attribute name on self <- datetime attribute name.
    pairs = (('yr', 'year'), ('mn', 'month'), ('dy', 'day'),
             ('hr', 'hour'), ('min', 'minute'), ('sec', 'second'))
    for short_name, dt_name in pairs:
        setattr(self, short_name, getattr(date_obj, dt_name))
    # 1-based yearday: days since Jan 1 of the current year, plus one.
    self.yd = self.jd - pl.date2num(dtm(self.yr, 1, 1)) + 1
def __init__(self, projname, casename=None, **kwargs):
    """Set up a tracmass run: locate TRMDIR, parse grid/run namelists."""
    super(Trm, self).__init__(projname, casename, **kwargs)
    if not hasattr(self, "trmdir"):
        self.trmdir = os.getenv("TRMDIR")
        if self.trmdir is None:
            raise EnvironmentError, """Trmdir is not set.
            Add TRMDIR=/path/to/tracmass to your local environment
            or specify trmdir when calling Trm."""
    def parse(od, pn, cn, sfx):
        # Read one namelist file ("<case>_<sfx>.in") under the project dir.
        gridfile = "/%s/projects/%s/%s_%s.in" % (od, pn, cn, sfx)
        if not os.path.isfile(gridfile):
            raise IOError("Can't find the file %s" % gridfile)
        return nlt.parse(gridfile)
    self.nlgrid = parse(self.trmdir, self.projname, self.projname, "grid")
    self.nlrun = parse(self.trmdir, self.projname, self.casename, "run")
    # Output location defaults come from the run namelist.
    if not hasattr(self, "datadir"):
        self.datadir = self.nlrun.outDataDir
    if not hasattr(self, "datafile"):
        self.datafile = self.nlrun.outDataFile
    # Base datenum: tracmass times are days since the namelist base date.
    self.base_iso = pl.date2num(dtm(self.nlgrid.baseYear,
                                    self.nlgrid.baseMon,
                                    self.nlgrid.baseDay)) - 1
    self.imt = self.nlgrid.IMT
    self.jmt = self.nlgrid.JMT
def load(self, field='nLw_645', jd=0, yr=0, mn=1, dy=1):
    """Load the remapped L2 swath with the best coverage for a given day.

    Among all files matching the requested year/yearday, reads the one
    with the most non-NaN pixels for `field` into self via self.read.
    The day can be given either as a datenum (jd) or as yr/mn/dy.
    """
    if jd != 0:
        # Derive calendar date and 1-based yearday from the datenum.
        yr = pl.num2date(jd).year
        mn = pl.num2date(jd).month
        dy = pl.num2date(jd).day
        yd = jd - pl.date2num(dtm(yr, 1, 1)) + 1
        t = int(np.floor(np.modf(jd)[0] * 8))  # 3-hour slot within the day
    elif yr != 0:
        # Fix: date2num needs a datetime, not bare ints.
        jd = pl.date2num(dtm(yr, mn, dy))
        # Fix: yd was undefined on this path, breaking the glob below.
        yd = jd - pl.date2num(dtm(yr, 1, 1)) + 1
        # Fix: literal 05 is invalid Python 3 syntax (py2 octal for 5).
        md = jd - pl.date2num(dtm(1992, 10, 5))
    files = glob.glob(self.satdir + "A%i%03i_*.l2_remap" % (yr, yd))
    sz = []
    for f in files:
        self.read(f, field)
        valid = self.__dict__[field][~np.isnan(self.__dict__[field])]
        sz.append(len(valid))
    print(sz)
    # Re-read the file that had the largest number of valid pixels.
    self.read(files[np.nonzero(np.array(sz) == max(sz))[0][0]], field)
def mission_min():
    """Compute the per-month minimum chlorophyll field over 2003-2010.

    Returns a (12, nlat, nlon) array where each month-slice holds the
    element-wise minimum of all daily chl fields observed in that month.
    """
    t1 = pl.date2num(dtm(2003, 1, 1))
    t2 = pl.date2num(dtm(2010, 12, 31))
    ns = nasa()
    # Fix: was np.zeros(...) * 999, which is all zeros and pinned every
    # minimum at <= 0; start from a large sentinel value instead.
    MCmin = np.zeros((12, ns.lat.shape[0], ns.lat.shape[1])) + 999
    for jd in np.arange(t1, t2):
        ns.load('chl', jd=jd)
        mn = pl.num2date(jd).month - 1  # 0-based month index
        # Fix: slice was MCmin[mn:+1] == slice(mn, 1), empty for mn >= 1.
        MCmin[mn, :, :] = np.nanmin(
            np.vstack((MCmin[mn:mn + 1, :, :], ns.chl[np.newaxis, :, :])),
            axis=0)
        print(jd, pl.num2date(jd).year, mn)
    return MCmin
def load(self,fldname,jd=0,yr=0,mn=1,dy=1,hr=3):
    """ Load Cali Current fields for a given day.

    The day can be given as a datenum (jd) or as yr/mn/dy/hr. Reads the
    monthly binary file for fldname, masks fill values as NaN, and stores
    the result as self.<fldname> (and self.fld).
    """
    i1=self.i1; i2=self.i2; j1=self.j1; j2=self.j2
    if jd!=0:
        # Derive calendar components from the datenum.
        yr = pl.num2date(jd).year
        mn = pl.num2date(jd).month
        dy = pl.num2date(jd).day
        # +0.125 day (3 h) before taking the hour -- rounds to the
        # nearest 3-hourly slot; TODO confirm intent.
        hr = pl.num2date(jd+0.125).hour
        mi = pl.num2date(jd).minute
        se = pl.num2date(jd).second
    elif yr!=0:
        jd = pl.date2num(dtm(yr,mn,dy,hr))
    yd = jd - pl.date2num(dtm(yr,1,1)) + 1  # 1-based yearday
    filename = "/mon%s%04i%02i.dat" % (self.fieldpref[fldname], yr, mn)
    self.fld = self.read_bin(self.datadir + filename)
    # Mask fill/garbage values.
    self.fld[self.fld>1e6] = np.nan
    self.fld[self.fld==-9999]=np.nan
    self.__dict__[fldname] = self.fld
def default_params(convert=False):
    """Return the default model parameter set.

    Parameters
    ----------
    convert : bool
        When True, return the parameters packed into the fixed-order
        vector produced by param_dict2vec; otherwise return the dict.
    """
    duration_hours = 6.
    param_dict = {
        # default integer parameters
        'SCATMOD': 0,
        'ADV_DIM': 2,
        'ADV_OPT': 2,
        'DO_CHECK_FINAL': 1,
        'DO_CHECK_PROG': 1,
        'DO_CHECK_INIT': 1,
        'STEADY': 1,
        'DO_BREAKING': 1,
        'DO_ATTEN': 1,
        # default real parameters
        'duration': duration_hours * 60 * 60,  # seconds
        'young': 5.49e9,
        'drag_rp': 13.0,
        'viscoelastic_ws': 0.0,
        'CFL': 0.7,
        # other integer params
        'BRK_OPT': 1,
        'FSD_OPT': 1,
        'REF_HS_ICE': 1,
        'USE_ICE_VEL': 0,
        # initial conditions
        'Hs_init': 3.,
        'T_init': 12.,
        'mwd_init': -90.,
        'conc_init': .7,
        'h_init': 1.,
        'Dmax_init': 300.,
        # start time
        'start_time': dtm(2015, 1, 1),
        # diagnostics
        'itest': -1,
        'jtest': -1,
        'dumpfreq': 10,
    }
    if convert:
        return param_dict2vec(param_dict)
    return param_dict
def param_dict2vec(param_dict):
    """Flatten a parameter dict into the fixed-order numpy vector.

    Order: legacy int params, legacy real params, other ints, initial
    conditions, start time (whole days + leftover seconds since
    1900-01-01), then diagnostics. Returns a numpy array of length 28.
    """
    from datetime import datetime as dtm

    ordered_keys = [
        # old int_prams
        "SCATMOD", "ADV_DIM", "ADV_OPT", "BRK_OPT", "STEADY", "DO_ATTEN",
        "DO_CHECK_FINAL", "DO_CHECK_PROG", "DO_CHECK_INIT",
        # old real_prams
        "young", "drag_rp", "viscoelastic_ws", "duration", "CFL",
        # other integers
        "FSD_OPT", "REF_HS_ICE", "USE_ICE_VEL",
        # init cons
        "Hs_init", "T_init", "mwd_init", "conc_init", "h_init", "Dmax_init",
    ]
    param_vec = [param_dict[key] for key in ordered_keys]

    # start time: whole days since 1900-01-01 plus leftover seconds
    elapsed = param_dict['start_time'] - dtm(1900, 1, 1)
    model_day = elapsed.days
    model_seconds = elapsed.total_seconds() - 24 * 3600 * model_day
    param_vec.append(model_day)
    param_vec.append(model_seconds)

    # diagnostics
    for key in ("itest", "jtest", "dumpfreq"):
        param_vec.append(param_dict[key])
    return np.array(param_vec)
def dchl_dt_time(self):
    """Write daily Chl, dChl/dt, u and v for 2009 to 'dchl2009.nc'.

    dChl/dt is the forward difference between consecutive daily L3
    fields; currents are loaded for the day of the first field.
    """
    # Subregion bounds (grid indices).
    i1 = 0
    i2 = 230
    j1 = 275
    j2 = 600
    t1 = pl.date2num(dtm(2009,1,1))
    t2 = pl.date2num(dtm(2009,12,31))
    mat = np.zeros( (t2-t1,i2-i1,j2-j1) )  # NOTE(review): unused below
    # Define the output netCDF file, dimensions, and variables.
    nw = pycdf.CDF('dchl2009.nc',pycdf.NC.WRITE|pycdf.NC.CREATE)
    nw.title = 'DChl/Dt'
    nw.automode()
    time = nw.def_dim('time',t2-t1)
    lat = nw.def_dim('Latitude',i2-i1)
    lon = nw.def_dim('Longitude',j2-j1)
    dchl = nw.def_var('DChlDt', pycdf.NC.FLOAT, (time, lat,lon))
    chl = nw.def_var('Chl', pycdf.NC.FLOAT, (time, lat,lon))
    u = nw.def_var('u', pycdf.NC.FLOAT, (time, lat,lon))
    v = nw.def_var('v', pycdf.NC.FLOAT, (time, lat,lon))
    latD = nw.def_var('latitude', pycdf.NC.FLOAT, (lat,))
    lonD = nw.def_var('longitude', pycdf.NC.FLOAT, (lon,))
    timD = nw.def_var('time', pycdf.NC.FLOAT, (time,))
    # Coordinate variables.
    latD[:] = self.lat[i1:i2].astype(np.float32)
    lonD[:] = self.lon[j1:j2].astype(np.float32)
    timD[:] = np.arange(t1,t2).astype(np.float32)
    # March through the year, differencing consecutive daily fields.
    fld1 = self.loadL3(t1)[i1:i2,j1:j2]
    for n,t in enumerate(np.arange(t1+1,t2)):
        fld2 = self.loadL3(t)[i1:i2,j1:j2]
        self.load(t-1)  # currents for the day of fld1
        dchl[n,:,:] = (fld2-fld1).astype(np.float32)
        chl[n,:,:] = (fld1).astype(np.float32)
        u[n,:,:] = (self.u).astype(np.float32)[i1:i2,j1:j2]
        v[n,:,:] = (self.v).astype(np.float32)[i1:i2,j1:j2]
        fld1 = fld2
        print t,n
    nw.close()
def __init__(self,datadir = "/projData/CORE2/"):
    """Set up the CORE2 forcing reader: base datenum, grid and lon shift."""
    # Base datenum plus 15 days.
    # NOTE(review): the +15 offset presumably centers monthly fields
    # mid-month -- confirm against the dataset convention.
    self.jdbase = pl.date2num(dtm(1948,1,1))+15
    self.datadir = datadir
    # Any CORE2 file works as a grid template; this one is hard-coded.
    filename = "u_10.2005.05APR2010.nc"
    try:
        n = netCDF4.Dataset(datadir + filename)
    except:
        print 'Error opening the gridfile %s' % datadir + filename
        raise
    self.lat = n.variables['LAT'][:]
    # Shift longitudes to a continuous vector (gmtgrid handles the wrap).
    self.gmt = gmtgrid.Shift(n.variables['LON'][:].copy())
    self.lon = self.gmt.lonvec
    self.llon,self.llat = np.meshgrid(self.lon,self.lat)
    n.close()
def loadL3(self, jd=732281.0, mc=0, fld="chl"):
    """Bin a satellite L3 field onto the model grid (mean of hits per cell).

    jd: pylab datenum of the day to load (ignored when mc is given).
    mc: monthly-climatology index; 0 means load by jd instead.
    fld: satellite product name passed to self.ns.load.
    Returns sum/count per cell; cells with no hits divide by zero.
    """
    if not hasattr(self, "gcmi"):
        # Build the satellite<->model index vectors once.
        self.create_satijvecs()
    if mc != 0:
        self.ns.load(fld, fldtype="MC", mc=mc)
    else:
        yr = pl.num2date(jd).year
        yd = jd - pl.date2num(dtm(pl.num2date(jd).year, 1, 1)) + 1
        self.ns.load(fld, yr=yr, yd=yd)
    cnt = self.llat[:] * 0
    fld = self.llat[:] * 0  # NOTE: shadows the `fld` parameter from here on
    msk = ~np.isnan(np.ravel(self.ns.chl))
    # NOTE(review): the unpacked names look swapped -- gj/gi receive
    # satj/sati values while sj/si receive gcmj/gcmi. Verify the intended
    # mapping direction against create_satijvecs before relying on this.
    for gj, gi, sj, si in np.vstack((self.satj[msk], self.sati[msk],
                                     self.gcmj[msk], self.gcmi[msk])).T:
        fld[gj, gi] += self.ns.chl[sj, si]
        cnt[gj, gi] += 1
    fld = fld / cnt
    return fld
def load(self, jd):
    """Read NCEP sig995 u/v wind for datenum `jd`; return wind speed.

    The 6-hourly record index is derived from the fractional yearday.
    Speeds above 200 are masked as NaN. Raises on unreadable files.
    """
    yr = pl.num2date(jd).year
    yd = int((jd - pl.date2num(dtm(yr, 1, 1))) * 4)  # 6-hourly index
    ufile = "uwnd.sig995.%04i.nc" % yr
    vfile = "vwnd.sig995.%04i.nc" % yr
    try:
        un = pycdf.CDF(self.datadir + ufile)
    except Exception:
        # Fix: the original referenced bare `datadir` (a NameError) and
        # applied % before concatenation; format the full path instead.
        print('Error opening the windfile %s' % (self.datadir + ufile))
        raise
    try:
        vn = pycdf.CDF(self.datadir + vfile)
    except Exception:
        print('Error opening the windfile %s' % (self.datadir + vfile))
        raise
    # Unpack packed values; scale/offset presumably from the file
    # metadata -- TODO confirm 0.01 / 225.45 against the NetCDF attrs.
    u = un.var('uwnd')[yd, :, :] * 0.01 + 225.45
    v = vn.var('vwnd')[yd, :, :] * 0.01 + 225.45
    nwnd = gmtgrid.convert(np.sqrt(u**2 + v**2), self.gr)
    nwnd[nwnd > 200] = np.nan
    return nwnd
def load(self, jd):
    """Read NCEP sig995 u/v wind for datenum `jd`; return wind speed.

    Duplicate of the sibling wind loader; fixed identically for
    consistency. Speeds above 200 are masked as NaN.
    """
    yr = pl.num2date(jd).year
    yd = int((jd - pl.date2num(dtm(yr, 1, 1))) * 4)  # 6-hourly index
    ufile = "uwnd.sig995.%04i.nc" % yr
    vfile = "vwnd.sig995.%04i.nc" % yr
    try:
        un = pycdf.CDF(self.datadir + ufile)
    except Exception:
        # Fix: bare `datadir` was an undefined name here (NameError).
        print('Error opening the windfile %s' % (self.datadir + ufile))
        raise
    try:
        vn = pycdf.CDF(self.datadir + vfile)
    except Exception:
        print('Error opening the windfile %s' % (self.datadir + vfile))
        raise
    # Unpack packed values; scale/offset presumably from the file
    # metadata -- TODO confirm 0.01 / 225.45 against the NetCDF attrs.
    u = un.var('uwnd')[yd, :, :] * 0.01 + 225.45
    v = vn.var('vwnd')[yd, :, :] * 0.01 + 225.45
    nwnd = gmtgrid.convert(np.sqrt(u**2 + v**2), self.gr)
    nwnd[nwnd > 200] = np.nan
    return nwnd
def __init__(self, projname, casename=None, **kwargs):
    """Set up a tracmass run: read namelists and resolve output paths."""
    super(Trm, self).__init__(projname, casename, **kwargs)
    # Placeholders for tracmass command-line arguments.
    self._arglist = ['ints0','part','rank','arg1','arg2']
    self._argdict = {k:None for k in self._arglist}
    if not hasattr(self, 'trmdir'):
        self.trmdir = os.getenv('TRMDIR')
        if self.trmdir is None:
            raise EnvironmentError, """Trmdir is not set.
            Add TRMDIR=/path/to/tracmass to your local environment
            or specify trmdir when calling Trm."""
    projdir = os.path.join(self.trmdir, "projects", self.projname)
    # Prefer the combined "<case>.in" layout; fall back to _grid/_run pair.
    self.nlgrid = nlt.Namelist()
    if os.path.isfile(os.path.join(projdir, "%s.in" % self.casename)):
        self.nlgrid.read("%s/%s.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s.in" % (projdir, self.casename))
    else:
        self.nlgrid.read("%s/%s_grid.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s_run.in" % (projdir, self.casename))
    self.nlrun = self.nlgrid
    self.filepref = (self.nlrun.outdatafile
                     if len(self.nlrun.outdatafile)>0
                     else self.casename)
    if not hasattr(self, "ftype"):
        # Map the namelist's numeric output type to a file extension.
        ftype = ['xxx','asc','bin','csv']
        self.ftype = ftype[self.nlrun.twritetype]
    self._generate_datadir(kwargs)
    # Base datenum: tracmass times are days since the namelist base date.
    self.base_iso = pl.date2num(dtm(
        self.nlgrid.baseyear,
        self.nlgrid.basemon,
        self.nlgrid.baseday))-1
    self.imt = self.nlgrid.imt
    self.jmt = self.nlgrid.jmt
    self.gen_filelists()
def run():
    """Read and plot each instrument's data for early April 2021.

    The NDU/SSC/RCEC/TORI sections are active; the GRIMM and WXT
    sections are disabled (their code sits inside triple-quoted string
    literals). Returns the most recently assigned dataset -- as written,
    the TORI data.
    """
    res = 0  # reset flag forwarded to every reader
    fig_pth = pth('..', 'picture')
    ## NDU
    # '''
    start_dtm = dtm(2021, 4, 1, 0, 0, 0)
    final_dtm = dtm(2021, 4, 6, 0, 0, 0)
    path = pth('..', 'data', 'Lidar_NDU', 'use')
    reader = NDU.reader(path, start_dtm, final_dtm, reset=res)
    # dt = reader.get_data(dtm(2021,4,2,0,0,0),dtm(2021,4,5,0,0,0))
    dt = reader.get_data()
    plot.plot_all(dt, fig_pth, tick_freq='24h')
    # '''
    ## SSC
    # '''
    start_dtm = dtm(2021, 4, 1)
    final_dtm = dtm(2021, 4, 6)
    path = pth('..', 'data', 'Lidar_SSC')
    reader = SSC.reader(path, start_dtm, final_dtm, reset=res)
    # dt = reader.get_data(dtm(2021,4,2,0,0,0),dtm(2021,4,5,0,0,0))
    dt = reader.get_data()
    plot.plot_all(dt, fig_pth, tick_freq='24h')
    # '''
    ## RCEC
    # '''
    start_dtm = dtm(2021, 4, 1)
    final_dtm = dtm(2021, 4, 6)
    path = pth('..', 'data', 'Lidar_RCEC')
    reader = RCEC.reader(path, start_dtm, final_dtm, reset=res)
    # dt = reader.get_data(dtm(2021,4,2,0,0,0),dtm(2021,4,5,0,0,0))
    dt = reader.get_data()
    plot.plot_all(dt, fig_pth, tick_freq='24h')
    # '''
    ## TORI
    # '''
    start_dtm = dtm(2021, 4, 1)
    final_dtm = dtm(2021, 4, 6)
    path = pth('..', 'data', 'Lidar_TORI')
    reader = TORI.reader(path, start_dtm, final_dtm, reset=res)
    # dt = reader.get_data(dtm(2021,4,2,0,0,0),dtm(2021,4,5,0,0,0))
    dt = reader.get_data()
    plot.plot_all(dt, fig_pth, tick_freq='24h')
    # '''
    ## GRIMM -- disabled: the block below is a string literal, not code.
    '''
    start_dtm = dtm(2021,4,1)
    final_dtm = dtm(2021,4,4)
    path = pth('..','data','GRIMM')
    grimm = GRIMM.reader(path,start_dtm,final_dtm,reset=False)
    # dt = grimm.get_data()
    # dt = grimm.plot(pth('..','picture'),dtm(2021,4,2,6),dtm(2021,4,2,18),tick_freq='2h',mean_freq='6T')
    dt = grimm.plot(pth('..','picture'),start_dtm,final_dtm,tick_freq='12h',mean_freq='30T')
    # '''
    ## WXT -- disabled: the block below is a string literal, not code.
    '''
    start_dtm = dtm(2021,4,1)
    final_dtm = dtm(2021,4,5)
    path = pth('..','data','WXT')
    wxt = WXT.reader(path,start_dtm,final_dtm,reset=False)
    dt = wxt.get_data()
    dt = wxt.plot(pth('..','picture'),dtm(2021,4,3,12),dtm(2021,4,4,16),tick_freq='6h',mean_freq='30T')
    # '''
    return dt
import numpy as np
from datetime import datetime as dtm
from datetime import timedelta as td
import time
import os
import sys
from multiprocessing import Pool
sys.path.append("bin")
import MCPath

# Valuation date, as a (day, month, year) tuple and as a datetime.
today = (24,2,2020)
todaydt = dtm(2020,2,24)
num = 250000  # number of Monte Carlo paths
steps = 366  # number of time steps
tenor = steps/365  # NOTE(review): integer division (== 1) if run on py2 -- confirm
ir_type = 1  # interest-rate curve type flag -- semantics defined in MCPath, presumably
ir_term = np.array([0,31,94,182,276,367])  # curve tenors in days
ir_data = np.array([2.61,2.61,2.76,2.79,2.81,2.83])/100  # rates as decimals
ir_dc = 0  # day-count flag -- TODO confirm meaning against MCPath
d_type = 0  # dividend curve type flag
d_term = np.array([0])
d_data = np.array([0.01])
d_dc = 0
v_type = 1  # volatility curve type flag
v_term = np.array([1,30,63,92,124,154,183,215,245,274,306,336,366])
def create_tvec(self, lnmsk='', dtmsk=''):
    """Build the daily time vector (2003-01-01 .. 2006-12-30) and crop it
    to the instance's [t1, t2) index window.

    lnmsk and dtmsk are accepted for interface compatibility; they are
    not used here.
    """
    start = pl.date2num(dtm(2003, 1, 1))
    stop = pl.date2num(dtm(2006, 12, 31))
    self.tvec = np.arange(start, stop)
    self.tvec = self.tvec[self.t1:self.t2]
def __init__(self,projname, casename="", datadir="", datafile="", ormdir=""):
    """Set up a partsat postprocessing session.

    Connects to the local 'partsat' postgres database, parses the
    project's grid/run namelists, and attaches the matching GCM object
    (oscar/topaz/casco/gompom/jplSCB/jplNow/rutgersNWA).
    """
    self.projname = projname
    # Default the case name to the project name.
    if len(casename) == 0:
        self.casename = projname
    else:
        self.casename = casename
    self.datadir = datadir
    if ormdir:
        self.ormdir = ormdir
    else:
        self.ormdir = os.getenv('ORMDIR')
    self.isobase = datetime.datetime(2004,1,1)
    self.conn = psycopg2.connect (host="localhost", database="partsat")
    self.c = self.conn.cursor()
    # One table per project/case combination.
    self.tablename = ("%s%s" % (projname ,casename)).lower()
    self.nlgrid = nlt.parse('/%s/projects/%s/%s_grid.in' %
                            (self.ormdir,self.projname,self.projname))
    self.nlrun = nlt.parse('/%s/projects/%s/%s_run.in' %
                           (self.ormdir,self.projname,self.casename))
    # Output locations default to the run namelist values.
    if datadir:
        self.datadir = datadir
    else:
        self.datadir = self.nlrun.outDataDir
    if datafile:
        self.datafile = datafile
    else:
        self.datafile=self.nlrun.outDataFile
    # Base datenum: run times are days since the namelist base date.
    self.base_iso = pl.date2num(dtm(
        self.nlgrid.baseYear,
        self.nlgrid.baseMon,
        self.nlgrid.baseDay))-1
    self.imt = self.nlgrid.IMT
    self.jmt = self.nlgrid.JMT
    # Attach the project-specific GCM helper and grid.
    if projname == 'oscar':
        import oscar
        self.gcm = oscar.Oscar()
    elif projname=="topaz":
        griddir = '/projData/TOPAZ/1yr_1d/'
        gridname = '/22450101.ocean_daily.nc'
        g = pycdf.CDF(griddir + gridname)
        lon = g.var('xu_ocean')[:]
        lat = g.var('yu_ocean')[:]
        #self.lon[self.lon<-180] = self.lon[self.lon<-180] + 360
        self.llon,self.llat = np.meshgrid(lon, lat)
    elif projname=="casco":
        import casco
        self.gcm = casco.GCM()
        self.region = "casco"
        self.base_iso = pl.date2num(dtm(2004,1,1))
    elif projname=="gompom":
        # NOTE(review): `griddir` is only defined in the topaz branch;
        # this line raises NameError as written -- confirm intended path.
        n = pycdf.CDF(griddir + 'grid.cdf')
        self.llon = n.var('x')[:]
        self.llat = n.var('y')[:]
        self.base_iso = pl.date2num(dtm(2004,1,1))
    elif projname=="jplSCB":
        import jpl
        self.gcm = jpl.SCB()
        self.region = "scb"
        self.base_iso = pl.date2num(dtm(2001,1,1))-3./24
    elif projname=="jplNow":
        import jpl
        self.gcm = jpl.NOW()
        self.region = "scb"
    elif projname=="rutgersNWA":
        import rutgers
        self.gcm = rutgers.NWA()
        self.region = "nwa_small"
    # Mirror the GCM's landmask and grid onto this object when available.
    if hasattr(self,'gcm'):
        self.gcm.add_landmask()
        self.landmask = self.gcm.landmask
        self.llon = self.gcm.llon
        self.llat = self.gcm.llat
def create_bill_(request, c):
    """Create (or replace) a bill for company `c` from posted JSON data.

    Validates permissions, register, contact, timestamp, totals, items
    and per-item discounts; destockifies products; then persists Payment,
    Bill, BillItem and BillItemDiscount rows. Returns JsonOk with the
    serialized bill, or JsonError on the first validation failure.
    Note: stock is already reduced before later validations can fail.
    """
    def item_error(message, product):
        # Uniform per-item error message including the product name.
        return JsonError(message + " " + _("(Item" + ": ") + product.name + ")")

    # check permissions
    if not has_permission(request.user, c, "bill", "edit"):
        return JsonError(_("You have no permission to create bills"))

    # get data
    data = JsonParse(request.POST.get("data"))
    if not data:
        return JsonError(_("No data received"))

    # see if we're updating an existing bill
    existing_bill = None
    try:
        existing_bill = Bill.objects.get(company=c, id=int(data.get("id")))
        if existing_bill.status == g.PAID:
            return JsonError(_("This bill has already been paid, editing is not possible"))
    except (ValueError, TypeError):
        pass
    except Bill.DoesNotExist:
        pass

    # current company (that will be fixed on this bill forever):
    # save a FK to BillCompany; the last company is the current one
    bill_company = BillCompany.objects.filter(company=c).order_by("-datetime_created")[0]

    # current register: get BillRegister with the same id as current one
    try:
        bill_registers = BillRegister.objects.filter(register__id=int(data.get("register_id")))
        if len(bill_registers) > 0:
            bill_register = bill_registers[0]
        else:
            raise Register.DoesNotExist
    except (TypeError, ValueError, Register.DoesNotExist):
        return JsonError(_("Invalid register specified."))

    # current contact: get BillContact with the same id as the requested contact
    if data.get("contact"):
        try:
            bill_contacts = BillContact.objects.get(contact__id=int(data.get("contact").get("id"))).order_by(
                "datetime_created"
            )
            if len(bill_contacts) > 0:
                bill_contact = bill_contacts[0]
            else:
                raise Contact.DoesNotExist
        except (Contact.DoesNotExist, ValueError, TypeError):
            return JsonError(_("Invalid contact"))
    else:
        bill_contact = None

    # save all validated stuff in bill to a dictionary and insert into database at the end
    # prepare data for insert
    bill = {
        "company": c,
        "issuer": bill_company,
        "register": bill_register,
        "contact": bill_contact,
        "user_id": request.user.id,
        "user_name": str(request.user),
        "notes": data.get("notes", "")[: max_field_length(Bill, "notes")],
        "type": g.CASH,
        "status": g.WAITING,
        "items": [],
        "currency": get_company_value(request.user, c, "pos_currency"),
        # numbers...
        "base": Decimal(0),
        "discount": Decimal(0),
        "tax": Decimal(0),
        "total": Decimal(0),
        "created_by": request.user,
    }

    # timestamp
    try:
        # timestamp: send in an array of number:
        # [year, month, day, hour, minute, second]
        tn = [int(n) for n in data.get("timestamp")]
        bill["timestamp"] = dtm(year=tn[0], month=tn[1], day=tn[2], hour=tn[3], minute=tn[4], second=tn[5])
    except (ValueError, TypeError):
        return JsonError(_("Invalid timestamp"))

    # grand total must parse and be strictly positive
    r = parse_decimal(request.user, c, data.get("total"))
    if not r["success"] or r["number"] <= Decimal("0"):
        return JsonError(_("Invalid grand total value"))
    else:
        bill["total"] = r["number"]

    # validate items
    for i in data.get("items"):
        # get product
        try:
            product = Product.objects.get(company=c, id=int(i.get("product_id")))
        except Product.DoesNotExist:
            return JsonError(_("Product with this id does not exist") + " (id=" + i.get("product_id") + ")")

        # parse quantity
        r = parse_decimal(request.user, c, i.get("quantity"), g.DECIMAL["quantity_digits"])
        if not r["success"]:
            return item_error(_("Invalid quantity value"), product)
        else:
            if r["number"] <= Decimal("0"):
                return item_error(_("Cannot add an item with zero or negative quantity"), product)
        quantity = r["number"]

        # remove from stock; TODO: check negative quantities (?)
        # actually we leave negative quantities as they are or
        # when stock is empty, we leave it at 0
        product.destockify(quantity)
        product.save()

        item = {
            "created_by": request.user,
            "code": product.code,
            "shortcut": product.shortcut,
            "name": product.name,
            "description": product.description,
            "private_notes": product.private_notes,
            "unit_type": product.get_unit_type_display(),  # ! display, not the 'code'
            "stock": product.stock,
            # 'bill': not now, after bill is saved
            "product_id": product.id,
            "bill_notes": i.get("bill_notes"),
            "discounts": [],  # validated discounts (FK in database)
            # prices: will be calculated when discounts are ready
            "base": None,
            "quantity": None,
            "tax_rate": None,
            "batch": None,
            "discount": None,
            "net": None,
            "tax": None,
            "total": None,
        }
        bill["items"].append(item)

        for d in i["discounts"]:
            # check:
            # discount id: if it's -1, it's a unique discount on this item;
            # if it's anything else, the discount must belong to this company
            # and must be active and enabled
            d_id = int(d.get("id"))
            if d_id != -1:
                try:
                    dbd = Discount.objects.get(id=d_id, company=c)
                    if not dbd.is_active:
                        return item_error(_("The discount is not active"), product)
                except Discount.DoesNotExist:
                    return item_error(_("Chosen discount does not exist or is not valid"), product)

            # amount: parse number and check that percentage does not exceed 100%
            r = parse_decimal(request.user, c, d.get("amount"))
            if not r["success"]:
                return item_error(_("Invalid discount amount"), product)
            else:
                d_amount = r["number"]
                if d_amount < Decimal(0) or (d.get("type") == "Relative" and d_amount > Decimal(100)):
                    return item_error(_("Invalid discount amount"), product)

            # save data to bill
            discount = {
                "id": d_id,
                "code": d.get("code"),
                "description": d.get("description"),
                "type": d.get("type"),
                "amount": d_amount,
            }
            item["discounts"].append(discount)

        # save this item's prices to item's dictionary (will go into database later)
        try:
            item["base"] = parse_decimal_exc(request.user, c, i.get("base"), message=_("Invalid base price"))
            item["quantity"] = parse_decimal_exc(request.user, c, i.get("quantity"), message=_("Invalid quantity"))
            item["tax_rate"] = parse_decimal_exc(request.user, c, i.get("tax_rate"), message=_("Invalid tax rate"))
            item["batch"] = parse_decimal_exc(request.user, c, i.get("batch"), message=_("Invalid batch price"))
            item["discount"] = parse_decimal_exc(
                request.user, c, i.get("discount"), message=_("Invalid discount amount")
            )
            item["net"] = parse_decimal_exc(request.user, c, i.get("net"), message=_("Invalid net price"))
            item["tax"] = parse_decimal_exc(request.user, c, i.get("tax"), message=_("Invalid tax amount"))
            item["total"] = parse_decimal_exc(request.user, c, i.get("total"), message=_("Invalid total"))

            # accumulate bill-level totals from the item figures
            bill["base"] += item["batch"]
            bill["discount"] += item["discount"]
            bill["tax"] += item["tax"]
        except ValueError as e:
            return item_error(e.message, product)

    # at this point, everything is fine, insert into database
    if existing_bill:
        existing_bill.delete()

    bill_payment = Payment(
        type=g.CASH,
        total=bill["total"],
        currency=get_company_value(request.user, c, "pos_currency"),
        transaction_datetime=datetime.utcnow(),
        status=g.WAITING,
        created_by=request.user,
    )
    bill_payment.save()

    # create a new bill
    db_bill = Bill(
        created_by=request.user,
        company=c,  # current company, FK to Company object
        issuer=bill["issuer"],  # fixed company details at this moment, FK to BillCompany object
        user_id=bill["user_id"],  # id of user that created this bill, just an integer, not a FK
        user_name=bill["user_name"],  # copied user name in case that user gets 'fired'
        register=bill["register"],  # current settings of the register this bill was created on
        contact=bill["contact"],  # FK on BillContact, copy of the Contact object
        notes=bill["notes"],
        # timestamp=dtm.utcnow().replace(tzinfo=timezone(get_company_value(request.user, c, 'pos_timezone'))),
        timestamp=bill["timestamp"],
        payment=bill_payment,
        base=bill["base"],
        discount=bill["discount"],
        tax=bill["tax"],
    )
    db_bill.save()

    # create new items
    for item in bill["items"]:
        db_item = BillItem(
            created_by=item["created_by"],
            code=item["code"],
            shortcut=item["shortcut"],
            name=item["name"],
            description=item["description"],
            private_notes=item["private_notes"],
            unit_type=item["unit_type"],
            bill=db_bill,
            bill_notes=item["bill_notes"],
            product_id=item["product_id"],
            quantity=item["quantity"],
            base=item["base"],
            tax_rate=item["tax_rate"],
            batch=item["batch"],
            discount=item["discount"],
            net=item["net"],
            tax=item["tax"],
            total=item["total"],
        )
        db_item.save()

        # save discounts for this item
        for discount in item["discounts"]:
            db_discount = BillItemDiscount(
                created_by=request.user,
                bill_item=db_item,
                description=discount["description"],
                code=discount["code"],
                type=discount["type"],
                amount=discount["amount"],
            )
            db_discount.save()

    db_bill.save()

    d = {"bill": bill_to_dict(request.user, c, db_bill)}
    return JsonOk(extra=d)
def __init__(self, projname, casename=None, **kwargs):
    """Set up a tracmass run: namelists, output dir resolution, filelists."""
    super(Trm, self).__init__(projname, casename, **kwargs)
    # Placeholders for tracmass command-line arguments.
    self._arglist = ['ints0','part','rank','arg1','arg2']
    self._argdict = {k:None for k in self._arglist}
    if not hasattr(self, 'trmdir'):
        self.trmdir = os.getenv('TRMDIR')
        if self.trmdir is None:
            raise EnvironmentError, """Trmdir is not set.
            Add TRMDIR=/path/to/tracmass to your local environment
            or specify trmdir when calling Trm."""
    projdir = os.path.join(self.trmdir, "projects", self.projname)
    # Prefer the combined "<case>.in" layout; fall back to _grid/_run pair.
    self.nlgrid = nlt.Namelist()
    if os.path.isfile(os.path.join(projdir, "%s.in" % self.casename)):
        self.nlgrid.read("%s/%s.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s.in" % (projdir, self.casename))
    else:
        self.nlgrid.read("%s/%s_grid.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s_run.in" % (projdir, self.casename))
    self.nlrun = self.nlgrid
    self.filepref = (self.nlrun.outdatafile
                     if len(self.nlrun.outdatafile)>0
                     else self.casename)
    if not hasattr(self, "ftype"):
        # Map the namelist's numeric output type to a file extension.
        ftype = ['xxx','asc','bin','csv']
        self.ftype = ftype[self.nlrun.twritetype]
    # Resolve the output data directory: namelist value, then
    # $TRMOUTDATADIR/<project>, then "" (made absolute under trmdir).
    if not hasattr(self, 'datadir'):
        if len(self.nlrun.outdatadir) > 0:
            self.datadir = self.nlrun.outdatadir
        elif os.getenv("TRMOUTDATADIR") is not None:
            self.datadir = os.path.join(os.getenv("TRMOUTDATADIR"),
                                        self.projname)
        else:
            self.datadir = ""
    if not os.path.isabs(self.datadir):
        self.datadir = os.path.join(self.trmdir, self.datadir)
    if getattr(self.nlrun, "outdircase", False):
        self.datadir = os.path.join(self.datadir, self.casename)
    if getattr(self.nlrun, "outdirdate", False):
        # Start date for the dated output subdir: from kwargs (datenum
        # float, int day, or date string) or from the namelist.
        if type(kwargs.get('startdate')) is float:
            self.jdstart = kwargs['startdate']
        elif type(kwargs.get('startdate')) is int:
            self.jdstart = kwargs['startdate'] + 0.5  # midday of that day
        elif type(kwargs.get('startdate')) is str:
            self.jdstart = pl.datestr2num(kwargs['startdate'])
        else:
            self.jdstart = pl.datestr2num("%04i%02i%02i-%02i%02i" % (
                self.nlrun.startyear,
                self.nlrun.startmon,
                self.nlrun.startday,
                self.nlrun.starthour,
                self.nlrun.startmin))
        self.outdirdatestr = pl.num2date(self.jdstart).strftime("%Y%m%d-%H%M")
        self.datadir = os.path.join(self.datadir, self.outdirdatestr)
    else:
        self.outdirdatestr = ""
    # Output file prefix: namelist file name or the case name.
    if self.nlrun.outdatafile:
        self.datafilepref = os.path.join(self.datadir,
                                         self.nlrun.outdatafile)
    else:
        self.datafilepref = os.path.join(self.datadir, self.casename)
    # Base datenum: tracmass times are days since the namelist base date.
    self.base_iso = pl.date2num(dtm(
        self.nlgrid.baseyear,
        self.nlgrid.basemon,
        self.nlgrid.baseday))-1
    self.imt = self.nlgrid.imt
    self.jmt = self.nlgrid.jmt
    self.gen_filelists()
def parseLogLines(lines):
    """Parse [MONITOR] log lines into per-timestamp metric dicts.

    Returns (stats, header): stats is a list of dicts keyed by metric
    name plus "Time" and "Comments"; header lists every key in order of
    first appearance (with "Comments" appended last). A record is
    committed to stats when its "- Quantidade HTTP" metric is seen.
    """
    stats = []
    header = ["Time"]
    data = {}
    time_regex = r"(\d+)\/(\d+)\/(\d+)\s+(\d+)\:(\d+)\:(\d+)"
    curr_time = ""
    k = ""  # last metric key; fix: was unbound if a '%'-line came first
    # going through all lines in log
    for line in lines:
        result = re.search(time_regex, line)
        # current line is a timestamp (dd/mm/yyyy hh:mm:ss)
        if result:
            ctg = result.groups()
            curr_time = dtm(int(ctg[2]), int(ctg[1]), int(ctg[0]),
                            int(ctg[3]), int(ctg[4]), int(ctg[5]))
            if data:
                # New timestamp -> start a fresh record.
                if curr_time != data["Time"]:
                    data = {"Time": curr_time, "Comments": ""}
            else:
                data = {"Time": curr_time, "Comments": ""}
        elif ("AVISO 1" in line
              or "AVISO SISTEMA NORMALIZADO gc" in line) and curr_time:
            # Fix: original precedence was `A or (B and curr_time)`, so
            # "AVISO 1" lines were handled even before any timestamp.
            if stats:  # fix: guard against IndexError when stats is empty
                stats[-1]["Comments"] = line.replace("[MONITOR] ", "")
        elif "AVISO" in line and curr_time:
            data["Comments"] = line.replace("[MONITOR] ", "")
        # current line has a "key: value" metric to be stored
        elif ":" in line and curr_time:
            parts = line.replace("[MONITOR] ", "").split(":")
            k = parts[0].strip()
            v = parts[1].strip()
            if k not in header:
                header.append(k)
            if "-" in v:
                data[k] = float(v.split("-")[0])
            else:
                data[k] = float(v.split("%")[0])
            if k == "- Quantidade HTTP":
                # Last metric of a block -> commit the record.
                stats.append(data)
        # bare percentage line; attributed based on the last metric key
        elif "%" in line and curr_time:
            if "heap" in k:
                if "Heap %" not in header:
                    header.append("Heap %")
                data["Heap %"] = float(
                    line.replace("[MONITOR] ", "").split("%")[0])
            elif "perm" in k:
                if "Perm %" not in header:
                    header.append("Perm %")
                data["Perm %"] = float(
                    line.replace("[MONITOR] ", "").split("%")[0])
            elif "Garbage" in k:
                if "Garbage %" not in header:
                    header.append("Garbage %")
                data["Garbage %"] = float(
                    line.replace("[MONITOR] ", "").split("%")[0])
    header.append("Comments")
    return stats, header
def __init__(self, projname, casename=None, **kwargs):
    """Set up a tracmass run (reformatted duplicate of the sibling init):
    namelists, output dir resolution, filelists."""
    super(Trm, self).__init__(projname, casename, **kwargs)
    # Placeholders for tracmass command-line arguments.
    self._arglist = ['ints0', 'part', 'rank', 'arg1', 'arg2']
    self._argdict = {k: None for k in self._arglist}
    if not hasattr(self, 'trmdir'):
        self.trmdir = os.getenv('TRMDIR')
        if self.trmdir is None:
            raise EnvironmentError, """Trmdir is not set.
            Add TRMDIR=/path/to/tracmass to your local environment
            or specify trmdir when calling Trm."""
    projdir = os.path.join(self.trmdir, "projects", self.projname)
    # Prefer the combined "<case>.in" layout; fall back to _grid/_run pair.
    self.nlgrid = nlt.Namelist()
    if os.path.isfile(os.path.join(projdir, "%s.in" % self.casename)):
        self.nlgrid.read("%s/%s.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s.in" % (projdir, self.casename))
    else:
        self.nlgrid.read("%s/%s_grid.in" % (projdir, self.projname))
        self.nlgrid.read("%s/%s_run.in" % (projdir, self.casename))
    self.nlrun = self.nlgrid
    self.filepref = (self.nlrun.outdatafile
                     if len(self.nlrun.outdatafile) > 0
                     else self.casename)
    if not hasattr(self, "ftype"):
        # Map the namelist's numeric output type to a file extension.
        ftype = ['xxx', 'asc', 'bin', 'csv']
        self.ftype = ftype[self.nlrun.twritetype]
    # Resolve the output data directory: namelist value, then
    # $TRMOUTDATADIR/<project>, then "" (made absolute under trmdir).
    if not hasattr(self, 'datadir'):
        if len(self.nlrun.outdatadir) > 0:
            self.datadir = self.nlrun.outdatadir
        elif os.getenv("TRMOUTDATADIR") is not None:
            self.datadir = os.path.join(os.getenv("TRMOUTDATADIR"),
                                        self.projname)
        else:
            self.datadir = ""
    if not os.path.isabs(self.datadir):
        self.datadir = os.path.join(self.trmdir, self.datadir)
    if getattr(self.nlrun, "outdircase", False):
        self.datadir = os.path.join(self.datadir, self.casename)
    if getattr(self.nlrun, "outdirdate", False):
        # Start date for the dated output subdir: from kwargs (datenum
        # float, int day, or date string) or from the namelist.
        if type(kwargs.get('startdate')) is float:
            self.jdstart = kwargs['startdate']
        elif type(kwargs.get('startdate')) is int:
            self.jdstart = kwargs['startdate'] + 0.5  # midday of that day
        elif type(kwargs.get('startdate')) is str:
            self.jdstart = pl.datestr2num(kwargs['startdate'])
        else:
            self.jdstart = pl.datestr2num(
                "%04i%02i%02i-%02i%02i" % (self.nlrun.startyear,
                                           self.nlrun.startmon,
                                           self.nlrun.startday,
                                           self.nlrun.starthour,
                                           self.nlrun.startmin))
        self.outdirdatestr = pl.num2date(
            self.jdstart).strftime("%Y%m%d-%H%M")
        self.datadir = os.path.join(self.datadir, self.outdirdatestr)
    else:
        self.outdirdatestr = ""
    # Output file prefix: namelist file name or the case name.
    if self.nlrun.outdatafile:
        self.datafilepref = os.path.join(self.datadir,
                                         self.nlrun.outdatafile)
    else:
        self.datafilepref = os.path.join(self.datadir, self.casename)
    # Base datenum: tracmass times are days since the namelist base date.
    self.base_iso = pl.date2num(
        dtm(self.nlgrid.baseyear, self.nlgrid.basemon,
            self.nlgrid.baseday)) - 1
    self.imt = self.nlgrid.imt
    self.jmt = self.nlgrid.jmt
    self.gen_filelists()
def str2date(self, dtstr):
    """Parse a date(-time) string in several ad-hoc formats.

    Supported formats:
      * delimited dates, e.g. '2019-3-3' or '2019/03/03 10:20[:30]'
      * pure digit strings of length 8, 12, or 14
      * 'dd Mon yyyy' with an English three-letter month abbreviation
    Returns a datetime. Unrecognized input leaves `ret` unassigned and
    raises NameError (no fallback value is defined -- TODO confirm).
    """
    abc = re.findall(r'[a-zA-Z]+', dtstr)
    if len(abc) == 0:
        partition = re.findall(r'\d+', dtstr)
        if len(partition) >= 2:  # delimited, e.g. '2019-3-3'
            pieces = dtstr.split(' ')  # fix: local no longer shadows builtin `list`
            if len(pieces) >= 1:
                # First non-digit character is the date delimiter.
                delimit = re.findall('[^0-9]', pieces[0])[0]
            if isinstance(dtstr, str):
                if dtstr.replace(' ', '') == dtstr:
                    # Date only, no time part.
                    ret = dtm.strptime(dtstr, f"%Y{delimit}%m{delimit}%d")
                elif dtstr.replace(' ', '') != dtstr:
                    if len(partition) == 6:
                        ret = dtm.strptime(
                            dtstr, f"%Y{delimit}%m{delimit}%d %H:%M:%S")
                    elif len(partition) == 5:
                        ret = dtm.strptime(
                            dtstr, f"%Y{delimit}%m{delimit}%d %H:%M")
        elif len(
                partition
        ) == 1:  # pure number ['20190303','20190909232323','201901010101']
            if len(dtstr) == 8:
                ret = dtm(int(dtstr[:4]), int(dtstr[4:6]), int(dtstr[6:8]))
            elif len(dtstr) == 14:
                ret = dtm(int(dtstr[:4]), int(dtstr[4:6]), int(dtstr[6:8]),
                          int(dtstr[8:10]), int(dtstr[10:12]),
                          int(dtstr[12:]))
            elif len(dtstr) == 12:
                ret = dtm(int(dtstr[:4]), int(dtstr[4:6]), int(dtstr[6:8]),
                          int(dtstr[8:10]), int(dtstr[10:12]), 0)
    elif len(abc) == 1:
        partition = re.findall(r'\d+', dtstr)
        month_token = abc[0]
        if len(month_token) == 3:
            months = [
                'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
                'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC'
            ]
            for m, n in enumerate(months):
                if n == month_token.upper():
                    mm = m + 1
                    break
        for ptt in partition:
            if len(ptt) == 4:
                yy = int(ptt)  # fix: was int(i), the enumerate *index*
            elif len(ptt) == 2:
                dd = int(ptt)
        ret = dtm(yy, mm, dd)
    return ret