def data(self, date0=False, date1=False, quiet=True):
    '''
    Returns atm data form all times in basefolder files or between
    date0 (>=) and date1 (<=)
    '''
    # load every available time record first:
    res = interim_file_data(self.files, quiet=quiet)
    time = res['time']

    # first index at/after date0, last index at/before date1:
    if date0:
        i = np.where(time >= dateu.parse_date(date0))[0][0]
    else:
        i = 0
    if date1:
        j = np.where(time <= dateu.parse_date(date1))[0][-1]
    else:
        j = len(time)

    # restrict every variable to the requested window:
    if date0 or date1:
        for k in res.keys():
            if k == 'time':
                res[k] = res[k][i:j + 1]
            else:
                res[k].data = res[k].data[i:j + 1, ...]
    return res
def data(self, date0=False, date1=False, quiet=True):
    '''
    Returns atm data form all times in basefolder files or between
    date0 (>=) and date1 (<=)
    '''
    res = interim_file_data(self.files, quiet=quiet)
    time = res['time']

    # default to the full range, then narrow each end independently:
    i, j = 0, len(time)
    if date0:
        date0 = dateu.parse_date(date0)
        i = np.where(time >= date0)[0][0]
    if date1:
        date1 = dateu.parse_date(date1)
        j = np.where(time <= date1)[0][-1]

    if date0 or date1:
        for k in res.keys():
            if k == 'time':
                res[k] = res[k][i:j + 1]
            else:
                res[k].data = res[k].data[i:j + 1, ...]
    return res
def gen_clmbry(date, FA='a', nest=0, cf=CONF, quiet=True):
    '''Create climatology and boundary files for one date from the parent
    (hycom/mercator) model.

    Returns (err, isFatal, fclm, fbry).
    '''
    date = dateu.parse_date(date)
    err = ''
    isFatal = False
    fclm = opt.nameof('in', 'clm', date=date, FA=FA, nest=nest, cf=cf)
    fbry = opt.nameof('in', 'bry', date=date, FA=FA, nest=nest, cf=cf)
    grd = opt.nameof('in', 'grd', cf=cf)
    if os.path.isfile(fclm) and os.path.isfile(fbry):
        err = 'CLMBRY files already exists'
        isFatal = False
    else:
        nforec = opt.n_pred(cf)
        import get_mercator as get_hycom
        # no need to check if data is ready! if not gen_clm_bry will return error!
        # anyway, cannot know if hycom data of today is analtsis or forecast!!
        date1 = None
        if FA == 'f':
            # forecast runs cover up to the last predicted day:
            date1 = dateu.parse_date(dateu.next_date(date, nforec))
        try:
            err = get_hycom.gen_clm_bry(fclm, fbry, grd, date, date1,
                                        quiet=quiet)
            if err:
                err = 'ERROR creating clm bry files : %s' % err
                isFatal = True
        # FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; narrow it to Exception.
        except Exception:
            err = 'ERROR creating clm bry files'
            isFatal = True
    return err, isFatal, fclm, fbry
def clean_nc_files(cf, date1, date2, type, FA, nest=0, mdkeep=0, wkeep=-1,
                   clean=False, quiet=False, output=sys.stdout):
    '''
    Remove (clean=True) or just report model netcdf output files of one type
    between date1 and date2, keeping selected dates.

    mdkeep: day,month to keep
      ex: mdkeep=1  => keep day 1 of every month
      ex: mdkeep=(1,1) => keep day 1 of January (month,day)
      ex: mdkeep=0 => no keep
      (mdkeep=-1 keeps the last day of each month)
    wkeep: day of week to keep:
      ex: wkeep=0 => keep first day of week (synday)
      ex: wkeep=-1 => no keep
    '''
    date = date1
    while date <= date2:
        f = opt.nameof('out', type, date, FA, nest, cf)
        if os.path.isfile(f):
            # human-readable size, as (value, unit), for the report lines:
            hs = cb.hsize(os.path.getsize(f))
            y, m, d = dateu.parse_date(date)
            dweek = dateu.parse_date(date).weekday()
            if mdkeep == -1:
                MDkeep = dateu.mndays(
                    y, m)  # last day of month, then calc last day:
            else:
                MDkeep = mdkeep
            # NOTE(review): parses as (MDkeep and MDkeep == d) or
            # MDkeep == (m, d) — equivalent to the likely intent because
            # MDkeep==0 never equals a (m, d) tuple.
            if MDkeep and MDkeep == d or MDkeep == (m, d):
                if not quiet:
                    print >> output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[
                        1], '**', MDkeep
            elif wkeep != -1 and wkeep == dweek:
                if not quiet:
                    print >> output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[
                        1], '*', wkeep
            else:
                if not quiet:
                    if clean:
                        print >> output, 'Removing ', date, ' ', f, ' ', hs[
                            0], hs[1]
                    else:
                        print >> output, 'Not removing ', date, ' ', f, ' ', hs[
                            0], hs[1]
                # only actually delete when clean is set:
                if clean:
                    os.remove(f)
        else:
            print >> output, date, ' ', FA, ' no file'
        date = dateu.next_date(date, 1)
def dates_info(cf=CONF):
    '''
    Configuration start_date and end_date in the form yyyymmdd
    '''
    d0, e = get_conf(cf, 'DATES', 'start_date', type=str)
    d1, e = get_conf(cf, 'DATES', 'end_date', type=str)
    # strip the '-' separators (yyyy-mm-dd -> yyyymmdd):
    d0 = sjoin(d0.split('-'), '')
    d1 = sjoin(d1.split('-'), '')
    return dateu.parse_date(d0), dateu.parse_date(d1)
def dates_info(cf=CONF):
    '''
    Configuration start_date and end_date in the form yyyymmdd
    '''
    parsed = []
    for option in ('start_date', 'end_date'):
        val, e = get_conf(cf, 'DATES', option, type=str)
        # drop '-' separators before parsing:
        parsed.append(dateu.parse_date(sjoin(val.split('-'), '')))
    return parsed[0], parsed[1]
def op_plt_many(conf, plconf, startDate, endDate, FA=('a', 'f'), **kargs):
    # Run op_plt for every day in (startDate, endDate], for each run kind
    # in FA ('a' analysis / 'f' forecast), printing each (error, name) pair.
    startDate = dateu.parse_date(startDate)
    endDate = dateu.parse_date(endDate)
    # start one day before so the loop can advance at the top of each pass:
    date = dateu.next_date(startDate, -1)
    while endDate > date:
        date = dateu.next_date(date)
        for p in FA:
            errs, names = op_plt(conf, plconf, date, FA=p, **kargs)
            for e, n in zip(errs, names):
                print e, n
def op_plt_many(conf,plconf,startDate,endDate,FA=('a','f'),**kargs): startDate=dateu.parse_date(startDate) endDate=dateu.parse_date(endDate) date=dateu.next_date(startDate,-1) while endDate>date: date=dateu.next_date(date) for p in FA: errs,names=op_plt(conf,plconf,date,FA=p,**kargs) for e,n in zip(errs,names): print e,n
def op_plt_wind_rose(conf, plconf, date, FA='a', nest=0, **kargs):
    # Wind-rose plots for one date; one figure set per WINDR config tag.
    # kargs: closefig/clearfig/save (all default True).
    # Returns (Err, Out): error list and saved file paths.
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    Data, err = opt.get_plconf(plconf, 'WINDR')
    ifig = -1
    for tag in Data['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        err, figs, info = plt_wind_rose(conf, plconf, date, FA, nest, **args)
        # NOTE(review): err is never appended to Err, so Err is always
        # returned empty — confirm this is intended.
        if not err:
            # save:
            if save:
                if tag: Tag = '_' + tag
                else: Tag = tag
                tmp = dateu.parse_date(date)
                Y, M, D = tmp.year, tmp.month, tmp.day
                # destination directories <plots>/<year>/<mm_dd>/:
                Ydest = os.path.join(pltpath, '%s' % Y)
                if not os.path.isdir(Ydest): os.mkdir(Ydest)
                MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                if not os.path.isdir(MDdest): os.mkdir(MDdest)
                N = -1
                for fig in figs:
                    N += 1
                    place, day = info[N]
                    savename = 'wind_rose_%s_%s_%d_%s_n%d%s' % (
                        date, FA, day, place, nest, Tag)
                    savename = os.path.join(
                        MDdest, savename + '.' + Data['extension'][ifig])
                    Out += [savename]
                    fig.savefig(savename, dpi=fig.dpi)
                    # release figure memory as we go:
                    if clearfig: fig.clear()
                    if closefig: pl.close(fig)
    return Err, Out
def op_plt_wind_rose(conf, plconf, date, FA='a', nest=0, **kargs):
    # Create and save wind-rose figures for one date (one set per WINDR tag).
    # Optional kargs: closefig, clearfig, save — all default to True.
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    Data, err = opt.get_plconf(plconf, 'WINDR')
    ifig = -1
    for tag in Data['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        err, figs, info = plt_wind_rose(conf, plconf, date, FA, nest, **args)
        # NOTE(review): errors from plt_wind_rose are not collected into Err.
        if not err:
            # save:
            if save:
                if tag: Tag = '_' + tag
                else: Tag = tag
                tmp = dateu.parse_date(date)
                Y, M, D = tmp.year, tmp.month, tmp.day
                # plots are stored under <pltpath>/<year>/<mm_dd>/:
                Ydest = os.path.join(pltpath, '%s' % Y)
                if not os.path.isdir(Ydest): os.mkdir(Ydest)
                MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                if not os.path.isdir(MDdest): os.mkdir(MDdest)
                N = -1
                for fig in figs:
                    N += 1
                    place, day = info[N]
                    savename = 'wind_rose_%s_%s_%d_%s_n%d%s' % (
                        date, FA, day, place, nest, Tag)
                    savename = os.path.join(
                        MDdest, savename + '.' + Data['extension'][ifig])
                    Out += [savename]
                    fig.savefig(savename, dpi=fig.dpi)
                    # clear/close each figure to free memory:
                    if clearfig: fig.clear()
                    if closefig: pl.close(fig)
    return Err, Out
def gen_mdates(cf, prev_month=-1):
    '''Monthly (date1, date2) intervals from the configured start date up to
    prev_month months before the current month.'''
    start_date, end_date = opt.dates_info(cf)
    y1, m1, d1 = dateu.parse_date(start_date)
    y, m, d = dateu.parse_date(dateu.currday())
    y2, m2 = dateu.next_month(y, m, n=prev_month)
    dates = dateu.mrange(y1, m1, y2, m2)
    out = []
    # pair each month start with the day before the next month start:
    for first, nxt in zip(dates[:-1], dates[1:]):
        out.append((first, dateu.next_date(nxt, -1)))
    return out
def gen_mdates(cf, prev_month=-1):
    '''Return a list of monthly (first_day, last_day) pairs covering the
    configured start date up to prev_month months before now.'''
    start_date, end_date = opt.dates_info(cf)
    y1, m1, d1 = dateu.parse_date(start_date)
    y, m, d = dateu.parse_date(dateu.currday())
    y2, m2 = dateu.next_month(y, m, n=prev_month)
    dates = dateu.mrange(y1, m1, y2, m2)
    # each interval ends the day before the next month begins:
    return [(dates[i], dateu.next_date(dates[i + 1], -1))
            for i in range(len(dates) - 1)]
def check_bc(date,FA,wait=3600,cf=CONF): print 'checking parent model...' import get_mercator as get_hycom nforec=opt.n_pred(cf) date1=None if FA=='f': date1=dateu.parse_date(dateu.next_date(date,nforec)) ir=get_hycom.is_ready(date,date1,check1=FA=='a') if get_hycom.is_ready(date,date1): print 'bc ok at check' return True else: now = dateu.currday() tdiff = dateu.date_diff(date,now) print "waiting for bc" while tdiff.days < 1.5: time.sleep(wait) sys.stdout.write('.') now = dateu.currday() tdiff = dateu.date_diff(date,now) cond= get_hycom.is_ready(date,date1,check1=FA=='a') print " bc file ready = ",cond,' at ',now, tdiff if cond: return True return get_hycom.is_ready(date,date1,check1=FA=='a')
def gen_url(date, varname, dataset='GOM'):
    '''OPeNDAP url for a HYCOM dataset ('GOM' or 'GLOBAL'), variable and date.

    varname may use the long names temperature/salinity/u/v.
    '''
    # translate long variable names to the dataset's short names:
    names = {}
    names['temperature'] = 'temp'
    names['salinity'] = 'salt'
    names['u'] = 'uvel'
    names['v'] = 'vvel'
    if varname in names.keys():
        varname = names[varname]

    date = dateu.parse_date(date)
    if dataset == 'GOM':
        # experiment changed at 2012-04-06:
        if date.strftime('%Y%m%d') <= '20120406':
            return 'http://tds.hycom.org/thredds/dodsC/GOMl0.04/expt_30.1'
        else:
            return 'http://tds.hycom.org/thredds/dodsC/GOMl0.04/expt_31.0'
    elif dataset == 'GLOBAL':
        # FIX: this branch was chained after the date comparisons below, so
        # it was unreachable for dates >= 2011-01-01.
        return 'http://tds.hycom.org/thredds/dodsC/glb_analysis'

    # legacy per-year GOM urls (removed unused url0 of expt_20.1):
    date = date.strftime('%Y%m%d')
    if date >= '20120101':
        # FIX: the url was assigned to a local variable but never returned.
        return 'http://tds.hycom.org/thredds/dodsC/GOMl0.04/expt_30.1/2012'
    elif date >= '20110101':
        return 'http://tds.hycom.org/thredds/dodsC/GOMl0.04/expt_30.1/2011'
def op_plt(conf, plconf, date, FA, nest=0, **kargs):
    '''Dispatch the operational plots (hslices, floats, wind, wind rose)
    enabled by the plot-config flags, or forced by kargs['vname'].

    Returns (errors, output file names).
    '''
    # all initialized to the same empty list; only rebound, never mutated:
    herr = hout = ferr = fout = werr = wout = wrerr = wrout = []
    flags = opt.flags_info(plconf)
    # FIX: the parse_date result was assigned to an unused variable 'data';
    # normalize the date here as clearly intended.
    date = dateu.parse_date(date)
    hcond = (FA == 'a' and flags['hslicesa']) or (FA == 'f' and flags['hslicesf'])
    fcond = (FA == 'a' and flags['floatsa']) or (FA == 'f' and flags['floatsf'])
    wcond = (FA == 'a' and flags['winda']) or (FA == 'f' and flags['windf'])
    wrcond = (FA == 'a' and flags['windra']) or (FA == 'f' and flags['windrf'])
    # an explicit vname overrides the config flags:
    if 'vname' in kargs.keys():
        hcond = kargs['vname'] in ('temp', 'salt', 'zeta') or kargs['vname'].startswith('dye')
        fcond = kargs['vname'] == 'floats'
        wcond = kargs['vname'] == 'wind'
        wrcond = kargs['vname'] == 'windr'
    # make the conditions mutually exclusive (first one wins):
    if hcond: fcond, wcond, wrcond = False, False, False
    if fcond: hcond, wcond, wrcond = False, False, False
    if wcond: hcond, fcond, wrcond = False, False, False
    if wrcond: hcond, fcond, wcond = False, False, False
    if hcond:
        herr, hout = op_plt_hslice(conf, plconf, date, FA, nest, **kargs)
    if fcond:
        ferr, fout = op_plt_flt(conf, plconf, date, FA, nest, **kargs)
    if wcond:
        werr, wout = op_plt_wind(conf, plconf, date, FA, nest, **kargs)
    if wrcond:
        wrerr, wrout = op_plt_wind_rose(conf, plconf, date, FA, nest, **kargs)
    return herr + ferr + werr + wrerr, hout + fout + wout + wrout
def fill(self, data, tind='next', quiet=1):
    '''
    Fills model netcdf climatology file
    (data can be provided by prognostic.data2roms
    '''
    nc = netcdf.Pync(self.filename, 'w')
    if tind == 'next':
        tind = nc.dims['time']
    if not quiet:
        print('filling clm file %s' % self.filename)
    # time value: parse a date, or accept the raw value as a number
    try:
        time = netcdf.date2num(dts.parse_date(data['date']), self.tunits)
    except:
        time = data['date']  # date as number!
    for vname in nc.varnames:
        if vname.endswith('time'):
            if not quiet:
                print(' -- %s tind=%d %f' % (vname, tind, time))
            nc.vars[vname][tind] = time
    # state variables:
    for vname in ('temp', 'salt', 'u', 'v', 'ubar', 'vbar', 'zeta'):
        if not quiet:
            print(' %s' % vname)
        nc.vars[vname][tind, ...] = data[vname]
    nc.close()
def gen_ini(date, FA='a', nest=0, cf=CONF):
    # Create the initial-conditions file for `date` from the previous day's
    # restart file: a symlink, or a copy with time reset for roms-agrif on
    # January 1st.
    date = dateu.parse_date(date)
    dateRst = dateu.next_date(date, -1)
    rst = opt.nameof('out', 'rst', date=dateRst, FA=FA, nest=nest, cf=cf)
    ini = opt.nameof('in', 'ini', date=date, FA=FA, nest=nest, cf=cf)
    err = False
    isFatal = False
    if os.path.isfile(ini):
        err = 'INI file already exists: ....%s' % ini[-30:]
        isFatal = False
    elif not os.path.isfile(rst):
        err = 'RST file not found: %s' % rst
        isFatal = True
    else:
        y, m, d = date.year, date.month, date.day
        model = opt.get_conf(cf, 'MODEL', 'name', type=str)[nest]
        if model.lower() == 'roms-agrif' and (m, d) == (1, 1):
            # copy and change time to 0:
            err = opt.restart_ini(rst, ini)
            if err:
                isFatal = True
        else:
            # do not copy, create link:
            try:
                os.symlink(rst, ini)
            except OSError, e:
                err = e.strerror
                isFatal = True
    # NOTE(review): no return statement here — callers receive None, while
    # the sibling gen_* functions return (err, isFatal, ...). Confirm.
def gen_atmfrc(date,FA='a',nest=0,cf=CONF,quiet=True): date=dateu.parse_date(date) err='' isFatal=False fname=opt.nameof('in','blk',date=date,FA=FA,nest=nest,cf=cf) if os.path.isfile(fname): err='BLK file already exists' isFatal=False else: grd=opt.nameof('in','grd',cf=cf) atmPath = opt.pathof(cf,'external','atm') atmData=opt.atm_info(cf)['data'] nforec=opt.n_pred(cf) if atmData=='wrf': cycle=366-29+dateu.mndays(date.year,date.month) err=bfrc.make_blk_wrf(fname,grd,date,FA,atmPath,quiet=quiet,cycle=cycle) elif atmData=='gfs': from okean.roms.inputs import surface if FA=='a': nforec=0 model=opt.get_conf(cf,'MODEL','name',type=str)[nest].lower() surface.make_blk_gfs(atmPath,grd,fname,date,nforec=nforec,model=model,quiet=quiet) try: if err: err='ERROR creating bulk file ('+err+')' isFatal=True except OSError, e: err='ERROR creating bulk file ('+str(e)+')' isFatal=True except:
def op_plt(conf, plconf, date, FA, nest=0, **kargs):
    '''Run the enabled operational plots for one date and run kind.

    Plot families: horizontal slices, floats, wind, wind rose. kargs['vname']
    forces a single family. Returns (errors, output file names).
    '''
    # all share one empty list; they are only rebound below, never mutated:
    herr = hout = ferr = fout = werr = wout = wrerr = wrout = []
    flags = opt.flags_info(plconf)
    # FIX: parse_date was assigned to an unused variable named 'data';
    # the intent was to normalize 'date'.
    date = dateu.parse_date(date)
    hcond = (FA == 'a' and flags['hslicesa']) or (FA == 'f'
                                                  and flags['hslicesf'])
    fcond = (FA == 'a' and flags['floatsa']) or (FA == 'f'
                                                 and flags['floatsf'])
    wcond = (FA == 'a' and flags['winda']) or (FA == 'f' and flags['windf'])
    wrcond = (FA == 'a' and flags['windra']) or (FA == 'f'
                                                 and flags['windrf'])
    # an explicit vname selects exactly one plot family:
    if 'vname' in kargs.keys():
        hcond = kargs['vname'] in ('temp', 'salt',
                                   'zeta') or kargs['vname'].startswith('dye')
        fcond = kargs['vname'] == 'floats'
        wcond = kargs['vname'] == 'wind'
        wrcond = kargs['vname'] == 'windr'
    # enforce mutual exclusion, first condition wins:
    if hcond: fcond, wcond, wrcond = False, False, False
    if fcond: hcond, wcond, wrcond = False, False, False
    if wcond: hcond, fcond, wrcond = False, False, False
    if wrcond: hcond, fcond, wcond = False, False, False
    if hcond:
        herr, hout = op_plt_hslice(conf, plconf, date, FA, nest, **kargs)
    if fcond:
        ferr, fout = op_plt_flt(conf, plconf, date, FA, nest, **kargs)
    if wcond:
        werr, wout = op_plt_wind(conf, plconf, date, FA, nest, **kargs)
    if wrcond:
        wrerr, wrout = op_plt_wind_rose(conf, plconf, date, FA, nest, **kargs)
    return herr + ferr + werr + wrerr, hout + fout + wout + wrout
def zip_out_monthly(cf, ccf):
    '''Compress monthly model stdout files until previous month - minmonths
    See clean.conf
    '''
    quiet = opt.get_conf(ccf, 'FLAGS')[0]['quiet']
    conf, err = opt.get_conf(ccf, 'OUT')
    if not conf['run']: return
    # one (date1, date2) pair per month to process:
    dates = gen_mdates(cf, prev_month=-conf['minmonths'])
    # start log:
    flog = log_init(cf, ccf)
    if not quiet: print >> flog, 'zip_out_monthly'.upper() + ' :'
    for date1, date2 in dates:
        y, m = dateu.parse_date(date1)[:-1]
        month = dateu.month_names(m).lower()
        # archive label, ex: january_2012
        label = '%s_%d' % (month, y)
        for FA in 'fa':  # both forecast and analysis runs
            zip_rout(cf, date1, date2, FA, conf['clean'], quiet, label,
                     output=flog)
    # end log:
    if not quiet: print >> flog, ''  # empty line
    log_end(flog)
def fill(self, data, tind='next', quiet=1):
    '''
    Fills model netcdf climatology file
    (data can be provided by prognostic.data2roms
    '''
    nc = netcdf.Pync(self.filename, 'w')
    if tind == 'next':
        tind = nc.dims['time']
    if not quiet:
        print('filling clm file %s' % self.filename)
    # about time: accept a parseable date or a raw number
    try:
        parsed = dts.parse_date(data['date'])
        time = netcdf.date2num(parsed, self.tunits)
    except:
        time = data['date']  # date as number!
    time_vars = [v for v in nc.varnames if v.endswith('time')]
    for v in time_vars:
        if not quiet:
            print(' -- %s tind=%d %f' % (v, tind, time))
        nc.vars[v][tind] = time
    for v in ('temp', 'salt', 'u', 'v', 'ubar', 'vbar', 'zeta'):
        if not quiet:
            print(' %s' % v)
        nc.vars[v][tind, ...] = data[v]
    nc.close()
def op_plt_wind(conf, plconf, date, FA='a', nest=0, **kargs):
    # Wind plots for one date: a figure per FIGURE tag and per day (daily
    # means). kargs: closefig/clearfig/save (default True). Returns (Err, Out).
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Figure, err = opt.get_plconf(plconf, 'FIGURE')
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    Data, err = opt.get_plconf(plconf, 'WIND')
    ifig = -1
    for tag in Figure['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        tind = Data['time'][ifig]
        if tind == 'dailyMean':
            # analysis is a single day; forecasts cover npred days:
            if FA == 'a':
                ndays = 1
            elif FA == 'f':
                ndays = int(opt.get_conf(conf, 'PRED', 'npred')[0])
        # NOTE(review): ndays is only assigned when tind=='dailyMean' and FA
        # in ('a','f'); any other config would raise NameError — confirm.
        for day in range(ndays):  # loop over time:
            args['day'] = day
            err, fig, info = plt_wind(conf, plconf, date, FA, nest, **args)
            Err += [err]
            if not err:
                # save:
                if save:
                    if tag: Tag = '_' + tag
                    else: Tag = tag
                    savename = 'wind_%s_%s_%d_n%d%s' % (
                        date.strftime('%Y%m%d'), FA, day, nest, Tag)
                    Y, M, D = date.year, date.month, date.day
                    # plots live under <pltpath>/<year>/<mm_dd>/:
                    Ydest = os.path.join(pltpath, '%s' % Y)
                    if not os.path.isdir(Ydest): os.mkdir(Ydest)
                    MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                    if not os.path.isdir(MDdest): os.mkdir(MDdest)
                    savename = os.path.join(
                        MDdest, savename + '.' + Figure['extension'][ifig])
                    Out += [savename]
                    pl.savefig(savename, dpi=pl.gcf().dpi)
                # free figure memory:
                if clearfig: fig.clear()
                if closefig: pl.close(fig)
    return Err, Out
def roms_read_out(f): '''Parse ROMS output text file Returns Time, Ek,Ep,Etotal,Volume Ex: time,kin,pot,tot,vol=roms_read_out('roms.out') Martinho MA, 2012 ''' tag = ' STEP Day' nskip = 2 badFormatStr = '******' fid = open(f) lines = fid.readlines() n = -1 for l in lines: if l.find('time_ref') >= 0: print str(int(float(l.split()[0]))) if l.find('time_ref') >= 0: time_ref = dateu.parse_date(str(int(float(l.split()[0])))) if l.find(tag) == 0: break n += 1 n = n + nskip + 1 time = [] kin = [] pot = [] tot = [] vol = [] c = -1 for l in lines[n:]: c += 1 if c == 0: L = len(l) if l.find('_') != -1 or l.find('=') != -1: continue # DEF_HIS, etc if l.find(badFormatStr): l.replace(badFormatStr, '0') if len(l) == L: tmp = l.split() # time: days = int(tmp[1]) hh, mm, ss = [int(j) for j in tmp[2].split(':')] time += [ time_ref + datetime.timedelta(days=days, hours=hh, minutes=mm, seconds=ss) ] kin += [float(tmp[3])] pot += [float(tmp[4])] tot += [float(tmp[5])] vol += [float(tmp[6])] return np.asarray(time), np.asarray(kin), np.asarray(pot), np.asarray( tot), np.asarray(vol)
def data(self, date0=False, date1=False, quiet=True):
    '''
    Returns atm data form all times in basefolder files or between
    date0 (>=) and date1 (<=)
    '''
    # open-ended defaults when no limit is given:
    if date0 is False:
        date0 = datetime.datetime(1, 1, 1)
    else:
        date0 = dateu.parse_date(date0)
    if date1 is False:
        date1 = datetime.datetime(9999, 1, 1)
    else:
        date1 = dateu.parse_date(date1)

    out = None
    for f in self.files:
        time = read_time(f)
        if not quiet:
            # FIX: leftover debug print ran even with quiet=True
            print(time[0], time[-1])
        # skip files with no record inside the window:
        if not np.any((time >= date0) & (time <= date1)):
            continue
        if not quiet:
            print('-> extracting file %s' % f)
        res = wrf_file_data(f, quiet=quiet)
        time = res['time']
        i, = np.where(time >= date0)
        j, = np.where(time <= date1)
        i = i[0]
        j = j[-1]
        for k in res.keys():
            if k == 'time':
                res[k] = res[k][i:j + 1]
            else:
                res[k].data = res[k].data[i:j + 1]

        if not out:
            out = res
        else:
            # append only records newer than what is already gathered:
            i0 = np.where(res['time'] > out['time'][-1])[0][0]
            for k in res.keys():
                if k == 'time':
                    out[k] = np.hstack((out[k], res[k][i0:]))
                else:
                    out[k].data = np.concatenate(
                        (out[k].data, res[k].data[i0:]))
    return out
def op_plt_wind(conf, plconf, date, FA='a', nest=0, **kargs):
    # Create and save daily wind plots (one figure per FIGURE tag per day).
    # Optional kargs: closefig, clearfig, save — default True.
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Figure, err = opt.get_plconf(plconf, 'FIGURE')
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    Data, err = opt.get_plconf(plconf, 'WIND')
    ifig = -1
    for tag in Figure['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        tind = Data['time'][ifig]
        if tind == 'dailyMean':
            # one day for analysis, npred days for forecast:
            if FA == 'a': ndays = 1
            elif FA == 'f': ndays = int(opt.get_conf(conf, 'PRED', 'npred')[0])
        # NOTE(review): ndays is undefined unless tind=='dailyMean' — confirm
        # WIND 'time' config is always dailyMean.
        for day in range(ndays):  # loop over time:
            args['day'] = day
            err, fig, info = plt_wind(conf, plconf, date, FA, nest, **args)
            Err += [err]
            if not err:
                # save:
                if save:
                    if tag: Tag = '_' + tag
                    else: Tag = tag
                    savename = 'wind_%s_%s_%d_n%d%s' % (
                        date.strftime('%Y%m%d'), FA, day, nest, Tag)
                    Y, M, D = date.year, date.month, date.day
                    # destination: <pltpath>/<year>/<mm_dd>/
                    Ydest = os.path.join(pltpath, '%s' % Y)
                    if not os.path.isdir(Ydest): os.mkdir(Ydest)
                    MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                    if not os.path.isdir(MDdest): os.mkdir(MDdest)
                    savename = os.path.join(
                        MDdest, savename + '.' + Figure['extension'][ifig])
                    Out += [savename]
                    pl.savefig(savename, dpi=pl.gcf().dpi)
                # release the figure:
                if clearfig: fig.clear()
                if closefig: pl.close(fig)
    return Err, Out
def roms_read_out(f,romsagrif=False): '''Parse ROMS output text file Returns Time, Ek,Ep,Etotal,Volume Ex: time,kin,pot,tot,vol=roms_read_out('roms.out') Martinho MA, 2012 ''' if romsagrif: tag=' MAIN: started time-steping.' nskip=2 else: tag=' STEP Day' nskip=2 badFormatStr='******' fid=open(f) lines=fid.readlines() n=-1 for l in lines: if l.find('time_ref')>=0: print str(int(float(l.split()[0]))) if l.find('time_ref')>=0: time_ref=dateu.parse_date(str(int(float(l.split()[0])))) if l.find(tag)==0: break n+=1 n=n+nskip+1 time=[] kin=[] pot=[] tot=[] vol=[] c=-1 for l in lines[n:]: c+=1 if c==0: L=len(l) if l.find('_')!=-1 or l.find('=')!=-1: continue # DEF_HIS, etc if l.find(badFormatStr): l.replace(badFormatStr,'0') if len(l)==L: tmp=l.split() # time: days=int(tmp[1]) hh,mm,ss=[int(j) for j in tmp[2].split(':')] time+=[time_ref+datetime.timedelta(days=days,hours=hh,minutes=mm,seconds=ss)] kin+=[float(tmp[3])] pot+=[float(tmp[4])] tot+=[float(tmp[5])] vol+=[float(tmp[6])] return np.asarray(time),np.asarray(kin),np.asarray(pot),np.asarray(tot),np.asarray(vol)
def data(self,date0=False,date1=False,quiet=True): ''' Returns atm data form all times in basefolder files or between date0 (>=) and date1 (<=) ''' if date0 is False: date0=datetime.datetime(1,1,1) else: date0=dateu.parse_date(date0) if date1 is False: date1=datetime.datetime(9999,1,1) else: date1=dateu.parse_date(date1) out=None for f in self.files: time=read_time(f) print time[0],time[-1] if not np.any((time>date0)&(time<date1)): continue if not quiet: print '-> extracting file %s'%f res=wrf_file_data(f,quiet=quiet) time=res['time'] i,=np.where(time>=date0) j,=np.where(time<=date1) i=i[0] j=j[-1] for k in res.keys(): if k =='time': res[k]=res[k][i:j+1] else: res[k].data=res[k].data[i:j+1] if not out: out=res else: i0=np.where(res['time']>out['time'][-1])[0][0] for k in res.keys(): if k =='time': out[k]=np.hstack((out[k],res[k][i0:])) else: out[k].data=np.concatenate((out[k].data,res[k].data[i0:])) return out
def files(self, date0, date1=False):
    '''NARR grb file urls and times from date0 (inclusive) to date1;
    date1 defaults to the day after date0.'''
    date0 = dateu.parse_date(date0)
    date1 = dateu.parse_date(date1) if date1 else date0 + datetime.timedelta(1)
    dates = dateu.drange(date0, date1, True)
    files = []
    time = []
    for d in dates:
        # last day contributes only the 00h record:
        hours = [0] if d == dates[-1] else range(0, 24, narrdt)
        for hour in hours:
            fname = 'narr-a_221_%s_%02d00_000.grb' % (d.strftime('%Y%m%d'),
                                                      hour)
            files.append(os.path.join(baseurl, d.strftime('%Y%m'),
                                      d.strftime('%Y%m%d'), fname))
            time.append(d + datetime.timedelta(hour / 24.))
    return files, np.array(time)
def clean_nc_files(cf, date1, date2, type, FA, nest=0, mdkeep=0, wkeep=-1,
                   clean=False, quiet=False, output=sys.stdout):
    '''
    Report, and when clean=True remove, model netcdf files of one type for
    every date in [date1, date2], except the dates selected to keep.

    mdkeep: day,month to keep
      ex: mdkeep=1  => keep day 1 of every month
      ex: mdkeep=(1,1) => keep day 1 of January (month,day)
      ex: mdkeep=0 => no keep
      (mdkeep=-1 => keep the last day of each month)
    wkeep: day of week to keep:
      ex: wkeep=0 => keep first day of week (synday)
      ex: wkeep=-1 => no keep
    '''
    date = date1
    while date <= date2:
        f = opt.nameof('out', type, date, FA, nest, cf)
        if os.path.isfile(f):
            # (value, unit) human-readable file size for the report:
            hs = cb.hsize(os.path.getsize(f))
            y, m, d = dateu.parse_date(date)
            dweek = dateu.parse_date(date).weekday()
            if mdkeep == -1:
                MDkeep = dateu.mndays(y, m)  # last day of month, then calc last day:
            else:
                MDkeep = mdkeep
            # NOTE(review): evaluates as (MDkeep and MDkeep==d) or
            # MDkeep==(m,d); equivalent to the intended grouping since a
            # falsy MDkeep never equals a (m,d) tuple.
            if MDkeep and MDkeep == d or MDkeep == (m, d):
                if not quiet:
                    print >>output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[1], '**', MDkeep
            elif wkeep != -1 and wkeep == dweek:
                if not quiet:
                    print >>output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[1], '*', wkeep
            else:
                if not quiet:
                    if clean:
                        print >>output, 'Removing ', date, ' ', f, ' ', hs[0], hs[1]
                    else:
                        print >>output, 'Not removing ', date, ' ', f, ' ', hs[0], hs[1]
                # deletion only happens with clean=True:
                if clean:
                    os.remove(f)
        else:
            print >>output, date, ' ', FA, ' no file'
        date = dateu.next_date(date, 1)
def __init__(self, filename, grid, sparams, **kargs):
    '''ROMS initial file generator; kargs: type, title, date (default 0).'''
    kargs.setdefault('type', 'ROMS Initial file')
    kargs.setdefault('title', 'ROMS Initial file')
    GenCommon.__init__(self, filename, grid, sparams, **kargs)
    self.date = kargs.get('date', 0)

    # about time: parse a date, or accept the value as a raw number
    try:
        self.date = dts.parse_date(self.date)
        self.time = netcdf.date2num(self.date, self.tunits)
    except:
        self.time = self.date  # date as number!
def nameof_log(date, FA, nest=0, cf=CONF):
    '''Full path of the log file for a date, run kind (FA) and nest.'''
    # currently Type is log
    Type = 'log'
    date = dateu.parse_date(date).strftime('%Y%m%d')
    name, e = get_conf(cf, 'NAME_LOG', 'name_' + Type, type=str)
    path0, e = get_conf(cf, 'PATHS', 'root', type=str)
    # '.' means: relative to the config file location
    if path0 == '.':
        root = os.path.realpath(os.path.dirname(cf))
    else:
        root = os.path.realpath(path0)
    path1, e = get_conf(cf, 'PATHS', 'logpath', type=str)
    name = name_rep(name, DATE=date, FA=FA, NEST=str(nest))
    return os.path.join(root, path1, name)
def data(self, date0=False, date1=False, quiet=True, grd=False):
    '''
    Returns atm data date0 (>=) and date1 (<=)
    '''
    # optional spatial limits taken from a roms grid:
    if grd:
        from okean import roms
        r = roms.Grid(grd)
        lims = (r.lon.min(), r.lon.max()), (r.lat.min(), r.lat.max())
    else:
        lims = False

    # get data for all times in file:
    res = cordex_file_data(self.f, lims=lims, quiet=quiet)
    time = res['time']

    # full range by default, narrowed by each given limit:
    i, j = 0, len(time)
    if date0:
        date0 = dateu.parse_date(date0)
        i = np.where(time >= date0)[0][0]
    if date1:
        date1 = dateu.parse_date(date1)
        j = np.where(time <= date1)[0][-1]

    if date0 or date1:
        for k in res:
            if k == 'time':
                res[k] = res[k][i:j + 1]
            else:
                res[k].data = res[k].data[i:j + 1, ...]
    return res
def gen_rivfrc(date, FA='a', nest=0, cf=CONF):
    '''Create the rivers forcing file for one date.

    Returns (err, isFatal, fname).
    '''
    date = dateu.parse_date(date)
    err = ''
    isFatal = False
    fname = opt.nameof('in', 'frc', date=date, FA=FA, nest=nest, cf=cf)
    grd = opt.nameof('in', 'grd', cf=cf)
    if os.path.isfile(fname):
        err = 'RIVERS file already exists'
        isFatal = False
    else:
        nforec = opt.n_pred(cf)
        import get_rivers
        date1 = None
        if FA == 'f':
            # forecast: cover up to the last predicted day
            date1 = dateu.parse_date(dateu.next_date(date, nforec))
        try:
            err = get_rivers.gen_frc(fname, grd, date, date1)
        # FIX: was a bare "except:"; it also caught SystemExit and
        # KeyboardInterrupt.
        except Exception:
            err = 'ERROR creating rivers file'
        if err:
            isFatal = True
    return err, isFatal, fname
def nameof_log(date, FA, nest=0, cf=CONF):
    '''Build the log file path for date/FA/nest from the configuration.'''
    # currently Type is log
    Type = 'log'
    ymd = dateu.parse_date(date).strftime('%Y%m%d')
    name, e = get_conf(cf, 'NAME_LOG', 'name_' + Type, type=str)
    path0, e = get_conf(cf, 'PATHS', 'root', type=str)
    # root '.' resolves relative to the config file itself:
    path0 = os.path.realpath(os.path.dirname(cf) if path0 == '.' else path0)
    path1, e = get_conf(cf, 'PATHS', 'logpath', type=str)
    name = name_rep(name, DATE=ymd, FA=FA, NEST=str(nest))
    return os.path.join(path0, path1, name)
def op_plt_flt(conf, plconf, date, FA='f', nest=0, **kargs):
    # Floats plot for one date: one figure per FIGURE tag.
    # kargs: closefig/clearfig/save (default True). Returns (Err, Out).
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Figure, err = opt.get_plconf(plconf, 'FIGURE')
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    ifig = -1
    for tag in Figure['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        err, fig, info = plt_flt(conf, plconf, date, FA, nest, **args)
        Err += [err]
        if not err:
            # save:
            if save:
                if tag: Tag = '_' + tag
                else: Tag = tag
                savename = 'floats_%s_%s_n%d%s' % (date, FA, nest, Tag)
                Y, M, D = date.year, date.month, date.day,
                # destination: <pltpath>/<year>/<mm_dd>/
                Ydest = os.path.join(pltpath, '%s' % Y)
                if not os.path.isdir(Ydest): os.mkdir(Ydest)
                MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                if not os.path.isdir(MDdest): os.mkdir(MDdest)
                savename = os.path.join(
                    MDdest, savename + '.' + Figure['extension'][ifig])
                Out += [savename]
                pl.savefig(savename, dpi=pl.gcf().dpi)
            # release figure memory:
            if clearfig: fig.clear()
            if closefig: pl.close(fig)
    return Err, Out
def __init__(self, filename, grid, sparams, **kargs):
    '''ROMS initial file generator; accepts type, title and date kargs.'''
    if 'type' not in kargs:
        kargs['type'] = 'ROMS Initial file'
    if 'title' not in kargs:
        kargs['title'] = 'ROMS Initial file'
    GenCommon.__init__(self, filename, grid, sparams, **kargs)
    self.date = kargs['date'] if 'date' in kargs else 0

    # about time: a parseable date, or the value taken as a number
    try:
        self.date = dts.parse_date(self.date)
        self.time = netcdf.date2num(self.date, self.tunits)
    except:
        self.time = self.date  # date as number!
def op_plt_flt(conf, plconf, date, FA='f', nest=0, **kargs):
    # Create and save the floats plot for one date (one figure per tag).
    # Optional kargs: closefig, clearfig, save — all default True.
    closefig = kargs.get('closefig', True)
    clearfig = kargs.get('clearfig', True)
    save = kargs.get('save', True)
    date = dateu.parse_date(date)
    Figure, err = opt.get_plconf(plconf, 'FIGURE')
    Err = []
    Out = []
    pltpath = opt.nameof('out', 'plots', cf=conf)
    ifig = -1
    for tag in Figure['tag']:
        ifig += 1
        args = {}
        args['ifig'] = ifig
        err, fig, info = plt_flt(conf, plconf, date, FA, nest, **args)
        Err += [err]
        if not err:
            # save:
            if save:
                if tag: Tag = '_' + tag
                else: Tag = tag
                savename = 'floats_%s_%s_n%d%s' % (date, FA, nest, Tag)
                Y, M, D = date.year, date.month, date.day,
                # plots are stored under <pltpath>/<year>/<mm_dd>/:
                Ydest = os.path.join(pltpath, '%s' % Y)
                if not os.path.isdir(Ydest): os.mkdir(Ydest)
                MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                if not os.path.isdir(MDdest): os.mkdir(MDdest)
                savename = os.path.join(MDdest,
                                        savename + '.' + Figure['extension'][ifig])
                Out += [savename]
                pl.savefig(savename, dpi=pl.gcf().dpi)
            # free the figure:
            if clearfig: fig.clear()
            if closefig: pl.close(fig)
    return Err, Out
def fill(self, data, tind='next', quiet=1):
    '''
    Fills model netcdf boundary conditions
    (data can be provided by prognostic.data2romsbry
    '''
    nc = netcdf.Pync(self.filename, 'w')
    if tind == 'next':
        tind = nc.dims['time']
    if not quiet:
        print('filling bry file %s' % self.filename)
    # time: parse a date, else accept the value as a number
    try:
        time = netcdf.date2num(dts.parse_date(data['date']), self.tunits)
    except:
        time = data['date']  # date as number!
    for v in nc.varnames:
        if v.endswith('time'):
            if not quiet:
                print(' -- %s tind=%d %f' % (v, tind, time))
            nc.vars[v][tind] = time
    names = ['temp', 'salt', 'u', 'v', 'ubar', 'vbar', 'zeta']
    if self.addxz:
        names += ['dist', 'distu', 'distv', 'depth', 'depthu', 'depthv']
    for base in names:
        for side in ('north', 'south', 'east', 'west'):
            vname = base + '_' + side
            if vname not in nc.varnames:
                continue
            if vname.startswith('dist'):
                # distances are time independent; write only once
                if tind == 0:
                    if not quiet:
                        print(' %s' % vname)
                    nc.vars[vname][:] = data[vname]
            else:
                if not quiet:
                    print(' %s' % vname)
                nc.vars[vname][tind, ...] = data[vname]
    nc.close()
def fill(self, data, tind='next', quiet=1):
    '''
    Fills model netcdf boundary conditions
    (data can be provided by prognostic.data2romsbry
    '''
    nc = netcdf.Pync(self.filename, 'w')
    if tind == 'next':
        tind = nc.dims['time']
    if not quiet:
        print('filling bry file %s' % self.filename)
    # about time: parseable date or a raw number
    try:
        t = dts.parse_date(data['date'])
        time = netcdf.date2num(t, self.tunits)
    except:
        time = data['date']  # date as number!
    time_vars = [v for v in nc.varnames if v.endswith('time')]
    for v in time_vars:
        if not quiet:
            print(' -- %s tind=%d %f' % (v, tind, time))
        nc.vars[v][tind] = time
    names = ('temp', 'salt', 'u', 'v', 'ubar', 'vbar', 'zeta')
    if self.addxz:
        names = list(names) + ['dist', 'distu', 'distv', 'depth', 'depthu',
                               'depthv']
    sides = 'north', 'south', 'east', 'west'
    for i in names:
        for j in sides:
            vname = '%s_%s' % (i, j)
            if vname in nc.varnames:
                if vname.startswith('dist'):
                    # not time dependent — only written at the first record:
                    if tind == 0:
                        if not quiet:
                            print(' %s' % vname)
                        nc.vars[vname][:] = data[vname]
                else:
                    if not quiet:
                        print(' %s' % vname)
                    nc.vars[vname][tind, ...] = data[vname]
    nc.close()
def zip_plots_monthly(cf, ccf):
    '''Compress monthly model stdout files until previous month - minmonths
    See clean.conf
    '''
    quiet = opt.get_conf(ccf, 'FLAGS')[0]['quiet']
    conf, err = opt.get_conf(ccf, 'PLOTS')
    if not conf['run']:
        return
    months = gen_mdates(cf, prev_month=-conf['minmonths'])
    # start log:
    flog = log_init(cf, ccf)
    if not quiet:
        print >>flog, 'zip_plots_monthly'.upper() + ' :'
    for first, last in months:
        y, m, d = dateu.parse_date(first)
        zip_plots(cf, y, m, conf['clean'], quiet, overwrite=False, output=flog)
    # end log:
    if not quiet:
        print >>flog, ''  # empty line
    log_end(flog)
def zip_plots_monthly(cf, ccf):
    '''Compress monthly model stdout files until previous month - minmonths
    See clean.conf
    '''
    quiet = opt.get_conf(ccf, 'FLAGS')[0]['quiet']
    conf, err = opt.get_conf(ccf, 'PLOTS')
    if not conf['run']:
        return
    pairs = gen_mdates(cf, prev_month=-conf['minmonths'])
    # start log:
    flog = log_init(cf, ccf)
    if not quiet:
        print >> flog, 'zip_plots_monthly'.upper() + ' :'
    # only the month start of each interval matters here:
    for k in range(len(pairs)):
        y, m, d = dateu.parse_date(pairs[k][0])
        zip_plots(cf, y, m, conf['clean'], quiet, overwrite=False,
                  output=flog)
    # end log:
    if not quiet:
        print >> flog, ''  # empty line
    log_end(flog)
def find_last(type='rout', FA='a', nest=0, cf=CONF):
    '''Most recent file of the given type ('rout', 'ini' or 'rst').

    Returns (date, filename), or ('', '') when nothing is found or the
    type is unknown.
    '''
    kargs = {'date': '*', 'FA': FA, 'nest': nest, 'cf': cf}
    # FIX: an unknown type left 'name' unbound and raised NameError below;
    # map each type to its in/out location instead.
    inout = {'rout': 'out', 'ini': 'in', 'rst': 'out'}
    if type not in inout:
        return '', ''
    name = opt.nameof(inout[type], type, **kargs)
    fs = glob.glob(name)
    if not fs:
        return '', ''
    # the date (yyyymmdd) sits where the '*' wildcard was:
    L = name.index('*')
    newest = max(fs, key=lambda f: f[L:L + 8])
    return dateu.parse_date(newest[L:L + 8]), newest
def get_clims(date, var, depth, plconf):
    '''Return the colour limits for a variable/depth at a given date.

    Looks up section CLIM_<VAR>[_<depth>] and the month option in the
    plot config, falling back to the depthless section and then to the
    'default' option.
    '''
    month = '%02d' % dateu.parse_date(date).month

    sectionNoDepth = 'CLIM_' + var.upper()
    if depth is False:
        section = sectionNoDepth
    else:
        section = 'CLIM_' + var.upper() + '_' + str(depth)

    option = str(month)
    res, err = get_plconf(plconf, section, option)

    # if error, try to get the sectionNoDepth:
    if isinstance(err, ConfigParser.NoSectionError):
        section = sectionNoDepth
        res, err = get_plconf(plconf, section, option)

    # if error, try the default option:
    if not res:
        res, err = get_plconf(plconf, section, 'default')

    return res
def get_clims(date, var, depth, plconf):
    '''Return the colour limits for a variable/depth at a given date.

    Tries section CLIM_<VAR>_<depth> (or CLIM_<VAR> when depth is False)
    with the month as option; falls back first to the generic section,
    then to the 'default' option.
    '''
    mes = '%02d' % dateu.parse_date(date).month

    base_section = 'CLIM_' + var.upper()
    section = base_section if depth is False else base_section + '_' + str(depth)

    res, err = get_plconf(plconf, section, mes)

    # depth-specific section missing: fall back to the generic one
    if isinstance(err, ConfigParser.NoSectionError):
        section = base_section
        res, err = get_plconf(plconf, section, mes)

    # month option missing: use the default
    if not res:
        res, err = get_plconf(plconf, section, 'default')

    return res
def zip_out_monthly(cf, ccf):
    '''Compress monthly model stdout files until previous month - minmonths
       See clean.conf
    '''
    quiet = opt.get_conf(ccf, 'FLAGS')[0]['quiet']
    conf, err = opt.get_conf(ccf, 'OUT')
    if not conf['run']:
        return

    dates = gen_mdates(cf, prev_month=-conf['minmonths'])

    # start log:
    flog = log_init(cf, ccf)
    if not quiet:
        print >> flog, 'zip_out_monthly'.upper() + ' :'

    for date1, date2 in dates:
        y, m = dateu.parse_date(date1)[:-1]
        # label like 'january_2012' for the archive name:
        label = '%s_%d' % (dateu.month_names(m).lower(), y)
        for FA in 'fa':
            zip_rout(cf, date1, date2, FA, conf['clean'], quiet, label,
                     output=flog)

    # end log:
    if not quiet:
        print >> flog, ''  # empty line
    log_end(flog)
def fill(self,data,tind='next',quiet=1):
    '''
    Fills model netcdf bulk forcing file

    data:  dict of forcing fields plus 'date'; keys may follow ROMS or
           ROMS-AGRIF naming (which scheme applies is detected from the
           variables present in the target file)
    tind:  record to write; 'next' appends after the last time record
    quiet: suppress progress messages when true
    '''
    nc=netcdf.Pync(self.filename,'w')
    if tind=='next': tind=nc.dims['time']

    if not quiet: print('filling blk file %s' % self.filename)

    # about time:
    # data['date'] may be a date (converted with the file time units)
    # or already a numeric time:
    try:
        date=dts.parse_date(data['date'])
        time=netcdf.date2num(date,self.tunits)
    except: time=data['date'] # date as number!

    # write the record in every *time variable:
    for i in nc.varnames:
        if i.endswith('time'):
            if not quiet: print(' -- %s tind=%d %f' % (i,tind,time))
            nc.vars[i][tind]=time

    # choose variable names according to the kind of file;
    # entries are (file_variable, data_key) pairs, a bare string means
    # both names match:
    if 'Tair' in nc.varnames: # roms
        names=('Tair','tair'),('Pair','pres'),('Qair','rhum'),('rain','prate'),\
              ('swrad','radsw'),('lwrad','radlw'),('Uwind','uwnd'),('Vwind','vwnd'),\
              'sustr','svstr','wspd','cloud',('lwrad_down','dlwrf')
        if not 'tair' in data.keys():
            # assuming data has roms (not agrif) varnames:
            names='Tair','Pair','Qair','rain','swrad','lwrad','Uwind','Vwind','sustr','svstr','wspd','cloud','lwrad_down'
    elif 'tair' in nc.varnames: # roms-agrif
        names='tair','pres','rhum','prate','radlw','radsw','dlwrf','uwnd',\
              'vwnd','wspd','sustr','svstr',\
              'cloud' # not used, but add it anyway

    for i in names:
        if isinstance(i,basestring): filev,datav=i,i
        else: filev,datav=i

        if datav not in data.keys():
            if not quiet: print(' Warning: data key %s not present' % datav)
        else:
            if not quiet: print(' %s (%s) min=%8.3f max=%8.3f' % (filev.ljust(7),datav.ljust(7), data[datav].min(),data[datav].max()))
            nc.vars[filev][tind,...]=data[datav]

            # fill original data:
            # a *_original variable is written only when present both in
            # data and in the file; otherwise warn about the mismatch:
            orig=datav+'_original'
            if orig in data.keys() and not orig in nc.varnames:
                if not quiet: print(' Warning: original data will not be written %s' % orig)
            elif not orig in data.keys() and orig in nc.varnames:
                if not quiet: print(' Warning: original data not present %s' % orig)
            elif orig in data.keys() and orig in nc.varnames:
                if not quiet: print(' %s min=%8.3f max=%8.3f' % (orig.ljust(7+9), data[orig].min(),data[orig].max()))
                nc.vars[orig][tind,...]=data[orig]

    # fill original x,y:
    # source-grid coordinates are static, so write them with the first
    # record only:
    if tind==0 and 'x_original' in data.keys() and 'x_original' in nc.varnames:
        if not quiet: print(' filling x,y original')
        nc.vars['x_original'][:]=data['x_original']
        nc.vars['y_original'][:]=data['y_original']

    nc.close()
def nameof(ioe,type,date='',FA='??',nest=0,cf=CONF,**kargs): '''return names with path of roms files, input, output and external ioe='in', 'out', 'ext', 'aux' ''' inStoragePath = kargs.get('istorage',False) outStoragePath = kargs.get('ostorage',False) extStoragePath = kargs.get('estorage',False) auxStoragePath = kargs.get('astorage',False) if ioe in ('in',0): NAMEsection = 'NAME_INPUTS' NAMEoption = 'name_in_'+type PATHoption1 = 'inputs' PATHoption2 = 'inputs_'+type elif ioe in ('out',1): NAMEsection = 'NAME_OUTPUTS' NAMEoption = 'name_out_'+type PATHoption1 = 'outputs' PATHoption2 = 'outputs_'+type elif ioe in ('ext',2): NAMEsection = 'NAME_EXTERNAL' NAMEoption = 'name_ext_'+type PATHoption1 = 'external' PATHoption2 = 'external_'+type elif ioe=='aux': NAMEsection = 'NAME_AUX' NAMEoption = 'name_aux_'+type PATHoption1 = 'aux' PATHoption2 = 'aux_'+type PATHsection = 'PATHS' path0,e=get_conf(cf,PATHsection,'root',type=str) if path0=='.': path0=os.path.realpath(os.path.dirname(cf)) else: path0=os.path.realpath(path0) path1,e=get_conf(cf,PATHsection,PATHoption1,type=str) path2,e=get_conf(cf,PATHsection,PATHoption2,type=str) name,err=get_conf(cf,NAMEsection,NAMEoption,type=str) if err: print err return try: year=dateu.parse_date(date).year except: year='' if isinstance(date,datetime.datetime): date=dateStr=date.strftime('%Y%m%d') else: dateStr=date name=name_rep(name,DATE=dateStr,NEST=str(nest),FA=FA,YEAR=str(year)) if inStoragePath and ioe=='in': return os.path.join(inStoragePath,name) elif outStoragePath and ioe=='out': return os.path.join(outStoragePath,name) elif extStoragePath and ioe=='ext': return os.path.join(extStoragePath,name) elif auxStoragePath and ioe=='aux': return os.path.join(auxStoragePath,name) else: return os.path.join(path0,path1,path2,name)
def plt_wind(conf, plconf, date, FA='a', nest=0, **kargs):
    '''Plot the model wind field for one day of a run.

    conf/plconf : model and plot configuration files
    date        : run date (parsed with dateu.parse_date)
    FA          : 'a' analysis or 'f' forecast
    kargs: ifig (plot config index), day (day offset inside the run),
           quiet

    Returns (err, fig, info); err is non-empty on failure.
    '''
    err = ''
    fig = False
    info = ''

    ifig = kargs.get('ifig', 0)
    day = kargs.get('day', 0)
    quiet = kargs.get('quiet', 0)

    date = dateu.parse_date(date)

    # find input files:
    args = {'cf': conf, 'date': date, 'FA': FA, 'nest': nest}
    atm = opt.nameof('in', 'blk', **args)
    grd = opt.nameof('in', 'grd', **args)
    if not os.path.isfile(atm):
        err = 'ATM file not found (%s)' % atm
        return err, fig, info
    if not os.path.isfile(grd):
        err = 'Grid file not found (%s)' % grd
        return err, fig, info

    Data, err = opt.get_plconf(plconf, 'WIND')
    dcurr = Data['dcurr'][ifig]
    lcurr = Data['lcurr'][ifig]
    scurr = Data['scurr'][ifig]
    clim = Data['clim'][ifig]
    tind = Data['time'][ifig]

    x = netcdf.use(grd, 'lon_rho')
    y = netcdf.use(grd, 'lat_rho')
    wtime = netcdf.nctime(atm, 'time')
    # select the records inside the requested day.
    # BUG FIX: the upper bound compared date with itself (always True);
    # it must bound wtime by the end of the day:
    cnd = (wtime >= date + datetime.timedelta(days=day)) & (
        wtime < date + datetime.timedelta(days=day + 1))
    u = netcdf.use(atm, 'Uwind', time=cnd)
    # BUG FIX: v was loaded from 'Uwind' (copy-paste error):
    v = netcdf.use(atm, 'Vwind', time=cnd)
    if tind == 'dailyMean':
        u = u.mean(0)
        v = v.mean(0)
        sdate = wtime[cnd][0]  # for title... 1st day 00h is expected to be
                               # 1st date, or model should not run!
    else:  # tind of some day, ex: tind 0 from forec day 3
        u = u[tind]
        v = v[tind]
        sdate = wtime[cnd][tind]

    if day > len(u) - 1:
        err = 'Invalid day %d (max=%d)' % (day, len(u) - 1)
        return err, fig, info

    # plot grid:
    proj, fig, ax = plt_grid(plconf, grd, ifig)

    # no mask on land:
    mask = np.zeros(u.shape, 'bool')
    mask[::dcurr[0], ::dcurr[1]] = True
    xm, ym = proj(x, y)
    s = np.sqrt(u ** 2 + v ** 2)
    q = pl.quiver(xm[mask], ym[mask], u[mask], v[mask], s[mask],
                  scale=scurr, zorder=100)
    pl.clim(clim[0], clim[1])

    def add_colorbar(handle, **args):
        # colorbar with optional translucent background axes, per AXES conf
        ax = pl.gca()
        Data, err = opt.get_plconf(plconf, 'AXES')
        cbpos = Data['cbpos'][ifig]
        cbbgpos = Data['cbbgpos'][ifig]
        cbbgc = Data['cbbgcolor'][ifig]
        cbbga = Data['cbbgalpha'][ifig]
        cblab = Data['cblabel'][ifig]

        # colorbar bg axes:
        if cbbgpos:
            rec = pl.axes((cbpos[0] - cbpos[2] * cbbgpos[0],
                           cbpos[1] - cbbgpos[2] * cbpos[3],
                           cbpos[2] * (1 + cbbgpos[0] + cbbgpos[1]),
                           cbpos[3] * (1 + cbbgpos[2] + cbbgpos[3])),
                          axisbg=cbbgc, frameon=1)
            rec.patch.set_alpha(cbbga)
            rec.set_xticks([])
            rec.set_yticks([])
            for k in rec.axes.spines.keys():
                rec.axes.spines[k].set_color(cbbgc)
                rec.axes.spines[k].set_alpha(cbbga)

        # colorbar:
        if cbpos:
            cbax = fig.add_axes(cbpos)
            if cbpos[2] > cbpos[3]:
                orient = 'horizontal'
            else:
                orient = 'vertical'
            cb = pl.colorbar(handle, cax=cbax, orientation=orient,
                             drawedges=0, **args)
            pl.axes(ax)
            # colorbar label:
            cb.set_label(r'Wind Speed [m s$^{\rm{-1}}$]')

    def add_currkey(handle):
        # reference arrow for the quiver, placed per plot config
        pos = Data['kcurrpos'][ifig]
        if pos:
            pl.quiverkey(handle, pos[0], pos[1], lcurr,
                         '%s m/s' % str(lcurr), labelpos='S',
                         coordinates='axes')

    add_colorbar(q)
    add_currkey(q)

    # title:
    Title, err = opt.get_plconf(plconf, 'AXES', 'title')
    if Title[ifig]:
        simpleTitle = 1
        rdate = date.strftime('%d-%m-%Y')
        title = 'wind %s %s %d' % (rdate, FA, day)
        if simpleTitle:  # simpler version of title:
            if FA == 'f':
                # forecast date:
                rdate = dateu.next_date(date, day)
                rdate = rdate.strftime('%d-%m-%Y')
            title = 'wind %s' % (rdate)
            if FA == 'f':
                title = title + ' (forec)'
        pl.title(title)

    # logo:
    if ifig == 0:
        im = os.path.join(os.path.dirname(__file__), 'logo_INOCAR.png')
        i = pl.imread(im)
        h, w = i.shape[:2]
        rx = .12
        W = (proj.xmax - proj.xmin) * rx
        H = W * h / w
        #pl.fill([proj.xmax-W, proj.xmax, proj.xmax, proj.xmax-W],
        #        [proj.ymin, proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
        #        '#500000',alpha=0.25,ec='none')
        ax.imshow(i, extent=(proj.xmax * .98 - W, proj.xmax * .98,
                             proj.ymin + H * .1, proj.ymin + H * 1.1),
                  zorder=1e3)
        #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
        #        fontdict={'size':14,'family':'serif'},
        #        color='#500000',ha='center',weight='bold')
        pl.text(proj.xmax * .8, proj.ymax * (-.1), sdate.strftime("%d %b %Y"),
                #pl.text(proj.xmax*.62, proj.ymax*.93,sdate.strftime("%d %b %Y"),
                fontdict={'size': 13, 'family': 'monospace'},
                ha='center')
        # change date format if tind is not daily mean, ie, add hour, etc
        if FA == 'f':
            s = 'Pronostico desde %s' % date.strftime("%d %b %Y")
            pl.text(proj.xmax * .8, proj.ymax * (-.15), s,  ##this is outside
                    #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s, ##this is in the proj (inside)
                    fontdict={'fontsize': 10},
                    ha='center')
    # logo.

    # lims change in some mpl versions !!
    pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])
    return err, fig, info
# rotate wind, print ' --> rot U,V' angle = g.use('angle') U, V = calc.rot2d(U, V, angle) print ' filling %s' % d.isoformat(' ') fill_frc(frcname, d, U, V) if __name__ == '__main__': import sys if len(sys.argv) == 5: grd = sys.argv[1] fname = sys.argv[2] date0 = dateu.parse_date(sys.argv[3]) date1 = dateu.parse_date(sys.argv[4]) make_frc(fname, grd, date0, date1) elif len(sys.argv) in [2, 3]: date = sys.argv[1] try: action = sys.argv[2] except: action = 'download' a = ascat() if action == 'download': if len(date) == 4: a.download_year(int(date)) else: a.download_day(date)
def gen_tidalfrc(date, FA='a', nest=0, cf=CONF):
    '''Generate tidal forcing file (placeholder, not implemented yet).

    Returns (err, isFatal) like the other gen_* steps (cf. gen_clmbry).
    '''
    from py_pack.roms import tides_forcing as tfrc

    date = dateu.parse_date(date)
    err = 'Not implemented yet!!'
    isFatal = True
    # BUG FIX: err/isFatal were computed but never returned, so callers
    # got None.  NOTE(review): assumed callers unpack (err, isFatal) as
    # with the sibling gen_* steps -- confirm against call sites.
    return err, isFatal
def oof(cf,plconf,date=False,last_date=False,FA='a',env=False): # start email notifications service: emailInfo=opt.email_info(cf=cf) sendEmail=emailInfo['send'] #if sendEmail: sys.stdout=opt.Redirect() env_vars(env) flags=opt.flags_info(cf) if date: date=dateu.parse_date(date) if last_date: last_date=dateu.parse_date(last_date) if not date: # find date-1 for prediction: date,file=find_last(type='rst',cf=cf) if not date: on_error(sendEmail,'ERROR (%s): Cannot find previous file'%FA,emailInfo) return else: print 'Last date = %s from file %s' % (date,file) rout=opt.nameof('out','rout',date=date,FA='a',cf=cf) if is_roms_out_ok(rout): print 'Previous roms out is ok: %s' % rout else: on_error(sendEmail,'ERROR (%s): Last run is not ok %s : %s' % (FA,date,rout),emailInfo) return else: date=dateu.next_date(date,-1) # read dates: start_date,end_date=opt.dates_info(cf) if last_date: end_date=dateu.next_date(last_date,+1) while date >= start_date and date < end_date: # read dates again, possible update may occur. 
start_date,end_date=opt.dates_info(cf) if last_date: end_date=dateu.next_date(last_date,+1) date=dateu.next_date(date) # check if already runned for that date: # ie, check for rst and check if roms_out is ok: rst=opt.nameof('out','rst',date=date,FA=FA,cf=cf) rout=opt.nameof('out','rout',date=date,FA=FA,cf=cf) if os.path.isfile(rst): print 'Found rst file for %s: %s' % (date,rst) if os.path.isfile(rout): if is_roms_out_ok(rout): print ' previous roms out is ok: %s' % rout else: on_error(sendEmail,'ERROR (%s): Previous roms out is NOT ok: %s' % (FA,rout),emailInfo) break else: print ' roms out for %s not found: NOT CHECKED' % date else: print '\nModel will start from %s' % date # check for atm data for current simulation: if flags['atmfrc'] or flags['atmblk']: atmStat=check_atm(date,FA,cf=cf) else: atmStat=True ## wait for rst in case of fa==F: ##if FA=='f': rstStat=check_rst(date,cf=cf) ##else: rstStat=True rstStat=check_rst(date,cf=cf) # check for bondary data for current simulation: if flags['clmbry']: # this step may take forever !! 
just let us belive parent model is available #bcStat=check_bc(date,FA,cf=cf) bcStat=True else: bcStat=True now=time.strftime("%Y-%m-%d %H:%M:%S +0",time.gmtime()) if (not atmStat is False) and (not rstStat is False) and (not bcStat is False): rout,dt,runErr=run(date,FA,cf=cf) now=time.strftime("%Y-%m-%d %H:%M:%S +0",time.gmtime()) # check if run was ok: if is_roms_out_ok(rout): msg='NO error %s %s'%(date,FA) Msg=' Run %s %s finished ok [%s] dt=%6.2f' % (date,FA,now,dt) print Msg # make plots: if flags['plots']: err,savenames=op_plot.op_plt(cf,plconf,date,FA) if not all(e=='' for e in err): msg+=' --> ERROR plotting' print ' ERROR plotting : ', for e in err: print e if not all(e=='' for e in savenames): for sv in savenames: print ' Saved plot '+sv elif runErr: on_error(sendEmail,'ERROR (%s): Run %s returned the error msg: %s' % (FA,date,runErr),emailInfo) break else: on_error(sendEmail,'ERROR (%s): Run %s finished with ERROR [%s] dt=%6.2f' % (FA,date,now,dt),emailInfo) break elif atmStat is False: Msg='ERROR (%s): Run %s cannot run (atm data missing) ERROR [%s]' % (FA,date,FA,now) if FA=='a': on_error(sendEmail,Msg,emailInfo) break else: msg='ERROR: atm data missing' print Msg elif rstStat is False: msg='ERROR: rst data missing' Msg='ERROR (%s): Run %s cannot run (atm data missing) ERROR [%s]' % (FA,date,now) print Msg print '\n' if sendEmail: send_email.send(emailInfo['dest'],Msg,msg)