def op_plt_many(conf,plconf,startDate,endDate,FA=('a','f'),**kargs):
  startDate=dateu.parse_date(startDate)
  endDate=dateu.parse_date(endDate)
  date=dateu.next_date(startDate,-1)
  while endDate>date:
    date=dateu.next_date(date)
    for p in FA:
      errs,names=op_plt(conf,plconf,date,FA=p,**kargs)
      for e,n in zip(errs,names): print e,n

def __update_wind(fname, datapath, source, **kargs):
  if source == 'quikscat':
    new_wind_info = 'wind from quikscat'
    from okean.datasets import quikscat
    a = quikscat.WINDData(datapath)
  elif source == 'blended':
    new_wind_info = 'wind from myocean blended'
    from okean.datasets import blended_wind
    a = blended_wind.WINDData(datapath)

  time = netcdf.nctime(fname, 'time')
  date0 = dts.next_date(time[0], -1)
  date1 = dts.next_date(time[-1], +2)
  data = a.data(date0, date1)

  update_wind(fname, data, new_wind_info, **kargs)

def __files(self, date0, date1=False, FA='a', nforec='auto'):
  '''Used by files_analysis and files_forecast'''
  if FA == 'f': date1 = False

  if nforec == 'auto': args = {}
  else: args = {'nforec': nforec}

  a = GFSDownload(basefolder=self.basefolder, **args)

  if date1 is False: dates = [date0]
  else: dates = dateu.drange(date0, date1)

  files = []
  time = []
  isbest = []

  # first file, 00h data (last of previous day)
  datePrev = dateu.next_date(date0, -1)
  file0 = a.daily_files(datePrev, FA='a')[1][-1]['name']

  for d in dates:
    Src, Dest = a.daily_files(d, FA=FA)
    for dest in Dest:
      files += [dest['name']]

  if files: files = [file0] + files

  for f in files:
    time += [get_date(f)]
    if os.path.isfile(f): isbest += [not os.path.islink(f)]
    else: isbest += [None]

  return files, time, isbest

def check_bc(date,FA,wait=3600,cf=CONF):
  print 'checking parent model...'
  import get_mercator as get_hycom
  nforec=opt.n_pred(cf)

  date1=None
  if FA=='f': date1=dateu.parse_date(dateu.next_date(date,nforec))

  ir=get_hycom.is_ready(date,date1,check1=FA=='a')
  if get_hycom.is_ready(date,date1):
    print 'bc ok at check'
    return True
  else:
    now   = dateu.currday()
    tdiff = dateu.date_diff(date,now)
    print "waiting for bc"
    while tdiff.days < 1.5:
      time.sleep(wait)
      sys.stdout.write('.')
      now   = dateu.currday()
      tdiff = dateu.date_diff(date,now)
      cond  = get_hycom.is_ready(date,date1,check1=FA=='a')
      print " bc file ready = ",cond,' at ',now, tdiff
      if cond: return True

  return get_hycom.is_ready(date,date1,check1=FA=='a')

def download_current(self,date=False,del1=True,quiet=True):
  '''
  Download files for today's analysis and forecast.
  If date is not provided (the current day is used), also download
  analysis data of the last self.ngetBefore days.

  Inputs:
    date, default is the current day
    del1, if the original version is grib1, the conversion to grib2 is
          done and the version 1 files are removed if del1 is True
    quiet, print info flag
  '''
  if not date: date=dateu.currday()
  else: self.ngetBefore=1 # just get the selected day!

  for i in range(self.ngetBefore):
    day=dateu.next_date(date,-i,samefmt=False)
    if not quiet: print 'Downloading GFS files for date '+day.strftime('%Y%m%d')
    if i==0:
      # download files for analysis and forecast:
      self.download_fast(day,FA='a',del1=del1,checkinv=True,quiet=quiet,prevopt=False)
      self.download_fast(day,FA='f',del1=del1,checkinv=True,quiet=quiet,prevopt=False)
    else:
      # download any missing analysis file from prev days:
      self.download_fast(day,FA='a',del1=del1,checkinv=False,quiet=quiet,prevopt=True)

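# Hedged usage sketch (not part of the original module): how download_current is
# typically driven. 'GFSData' is a hypothetical stand-in for whatever class defines
# download_current/download_fast here, and the 'YYYYMMDD' date string is an
# assumption about the formats dateu accepts.
def _example_download_gfs(basefolder):
  '''Fetch today's analysis and forecast files (plus any missing analysis files
  from the last ngetBefore days), then re-fetch one specific past day.'''
  g=GFSData(basefolder)                 # hypothetical constructor
  g.download_current()                  # current day + previous ngetBefore days
  g.download_current(date='20120115')   # a single selected day (ngetBefore forced to 1)
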
def check_rst(date,cf,wait=900):
  '''check rst for forecast, and wait for tdiff'''
  FA='a'
  date0=dateu.next_date(date,-1)
  rst=opt.nameof('out','rst',date=date0,FA=FA,cf=cf)
  # check also if run terminated! The rst may not have the last record yet!
  rout=opt.nameof('out','rout',date=date0,FA=FA,cf=cf)
  if os.path.isfile(rst) and is_roms_out_ok(rout): return True
  else:
    now   = dateu.currday()
    tdiff = dateu.date_diff(date,now)
    print "waiting for rst"
    while tdiff.days < 1.5:
      time.sleep(wait)
      sys.stdout.write('.')
      now   = dateu.currday()
      tdiff = dateu.date_diff(date,now)
      cond  = os.path.isfile(rst) and is_roms_out_ok(rout)
      print " rst file ready = ",cond,' at ',now, tdiff
      if cond: return True

  return os.path.isfile(rst)

def gen_clmbry(date,FA='a',nest=0,cf=CONF,quiet=True):
  date=dateu.parse_date(date)
  err=''
  isFatal=False

  fclm=opt.nameof('in','clm',date=date,FA=FA,nest=nest,cf=cf)
  fbry=opt.nameof('in','bry',date=date,FA=FA,nest=nest,cf=cf)
  grd=opt.nameof('in','grd',cf=cf)

  if os.path.isfile(fclm) and os.path.isfile(fbry):
    err='CLMBRY files already exist'
    isFatal=False
  else:
    nforec=opt.n_pred(cf)
    import get_mercator as get_hycom
    # no need to check if data is ready; if not, gen_clm_bry will return an error!
    # anyway, cannot know if hycom data of today is analysis or forecast!!
    date1=None
    if FA=='f': date1=dateu.parse_date(dateu.next_date(date,nforec))

    try:
      err=get_hycom.gen_clm_bry(fclm,fbry,grd,date,date1,quiet=quiet)
      if err:
        err='ERROR creating clm bry files : %s' % err
        isFatal=True
    except:
      err='ERROR creating clm bry files'
      isFatal=True

  return err, isFatal, fclm, fbry

def gen_ini(date,FA='a',nest=0,cf=CONF):
  date=dateu.parse_date(date)
  dateRst=dateu.next_date(date,-1)
  rst=opt.nameof('out','rst',date=dateRst,FA=FA,nest=nest,cf=cf)
  ini=opt.nameof('in','ini',date=date,FA=FA,nest=nest,cf=cf)

  err=False
  isFatal=False
  if os.path.isfile(ini):
    err='INI file already exists: ....%s' % ini[-30:]
    isFatal=False
  elif not os.path.isfile(rst):
    err='RST file not found: %s' % rst
    isFatal=True
  else:
    y,m,d=date.year,date.month,date.day
    model=opt.get_conf(cf,'MODEL','name',type=str)[nest]
    if model.lower()=='roms-agrif' and (m,d)==(1,1):
      # copy and change time to 0:
      err=opt.restart_ini(rst,ini)
      if err: isFatal=True
    else:
      # do not copy, create link:
      try: os.symlink(rst,ini)
      except OSError, e:
        err=e.strerror
        isFatal=True

  return err, isFatal, ini

def clean_nc_files(cf, date1, date2, type, FA, nest=0, mdkeep=0, wkeep=-1,
                   clean=False, quiet=False, output=sys.stdout):
  '''
  mdkeep: day or (month,day) to keep
    ex: mdkeep=1      => keep day 1 of every month
    ex: mdkeep=(1,1)  => keep day 1 of January (month,day)
    ex: mdkeep=-1     => keep the last day of every month
    ex: mdkeep=0      => no keep

  wkeep: day of week to keep
    ex: wkeep=0  => keep first day of week (Monday, as weekday()==0)
    ex: wkeep=-1 => no keep
  '''
  date = date1
  while date <= date2:
    f = opt.nameof('out', type, date, FA, nest, cf)
    if os.path.isfile(f):
      hs = cb.hsize(os.path.getsize(f))
      y, m, d = dateu.parse_date(date)
      dweek = dateu.parse_date(date).weekday()

      if mdkeep == -1:
        MDkeep = dateu.mndays(y, m)  # keep last day of the month
      else:
        MDkeep = mdkeep

      if MDkeep and MDkeep == d or MDkeep == (m, d):
        if not quiet:
          print >> output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[1], '**', MDkeep
      elif wkeep != -1 and wkeep == dweek:
        if not quiet:
          print >> output, '*Keeping ', date, ' ', f, ' ', hs[0], hs[1], '*', wkeep
      else:
        if not quiet:
          if clean: print >> output, 'Removing ', date, ' ', f, ' ', hs[0], hs[1]
          else: print >> output, 'Not removing ', date, ' ', f, ' ', hs[0], hs[1]

        if clean: os.remove(f)
    else:
      print >> output, date, ' ', FA, ' no file'

    date = dateu.next_date(date, 1)

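# Hedged usage sketch of the mdkeep/wkeep options documented above (not part of the
# original module); CONF, the 'his' output type and the 'YYYYMMDD' date strings are
# assumptions about how the rest of this code base calls clean_nc_files.
def _example_clean_outputs(cf=CONF):
  d1,d2='20120101','20120331'
  # dry run: only report what would be kept/removed, keeping day 1 of every month:
  clean_nc_files(cf,d1,d2,'his','a',mdkeep=1,wkeep=-1,clean=False)
  # actually remove, keeping the last day of each month and every Monday (weekday 0):
  clean_nc_files(cf,d1,d2,'his','a',mdkeep=-1,wkeep=0,clean=True)
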
def find_prev(cf=CONF):
  type='rout'
  date, file=find_last(type,cf=cf)
  if not date:
    type='ini'
    date, file=find_last(type,cf=cf)
    if date: date=dateu.next_date(date,-1)

  return date, file, type

def clean_frc(cf,date1,date2,type,FA,nest=0,clean=False,quiet=False,output=sys.stdout):
  date=date1
  while date<=date2:
    f=opt.nameof('in',type,date,FA,nest,cf)
    if os.path.isfile(f):
      if clean:
        if not quiet: print >>output,'Removing ',date,' ',f
        os.remove(f)
    else: print >>output,date,' ',FA,' no file',f

    date=dateu.next_date(date,1)

def gen_mdates(cf,prev_month=-1):
  start_date,end_date=opt.dates_info(cf)
  y1,m1,d1=dateu.parse_date(start_date)
  y,m,d=dateu.parse_date(dateu.currday())
  y2,m2=dateu.next_month(y,m,n=prev_month)
  dates=dateu.mrange(y1,m1,y2,m2)

  out=[]
  for i in range(len(dates)-1):
    date1=dates[i]
    date2=dateu.next_date(dates[i+1],-1)
    out+=[(date1,date2)]

  return out

def prev_option(self,src):
  '''
  Previous source file corresponding to the same time.
  Example: if the runs start every 6h and give outputs every 3h, the
  previous option of the 3h forecast of the run starting at 12h
  (time=15h) is the 9h forecast of the run starting at 06h.
  '''
  date       = src['date']
  hour_start = src['hour_start']
  hour_sim   = src['hour_sim']

  if hour_start==0:
    date       = dateu.next_date(date,-1)
    hour_start = 24-self.dt_start
    hour_sim   = hour_sim+self.dt_start
  else:
    hour_start = hour_start-self.dt_start
    hour_sim   = hour_sim+self.dt_start

  return self.nameof('src',date,hour_start,hour_sim)

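# Minimal standalone sketch of the hour bookkeeping done by prev_option above, for the
# docstring example (runs every 6h, outputs every 3h). It only mirrors the hour
# arithmetic, not the date rollback or the nameof call; the names dt_start/prev_hours
# are illustrative, not from the original module.
def prev_hours(hour_start,hour_sim,dt_start=6):
  '''Return (hour_start,hour_sim) of the previous run covering the same instant.'''
  if hour_start==0:
    # previous day's last run (prev_option also moves date back one day here):
    return 24-dt_start,hour_sim+dt_start
  return hour_start-dt_start,hour_sim+dt_start

# prev_hours(12,3) -> (6,9): the 9h forecast of the 06h run, as in the docstring.
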
def list_files(cf,date1,date2,io,type,FA,nest=0,quiet=True):
  files=cb.odict()
  date=date1
  while date<=date2:
    f=opt.nameof(io,type,date,FA,nest,cf)
    if os.path.isfile(f): files[date]=f
    else: files[date]=False
    date=dateu.next_date(date,1)

  if not quiet:
    for d in files.keys():
      if files[d]:
        f=files[d]
        hs=cb.hsize(os.path.getsize(f))
        print d,' ',f,' ',hs[0],hs[1]
      else: print d,' no file'

  return files

def gen_rivfrc(date,FA='a',nest=0,cf=CONF):
  date=dateu.parse_date(date)
  err=''
  isFatal=False

  fname=opt.nameof('in','frc',date=date,FA=FA,nest=nest,cf=cf)
  grd=opt.nameof('in','grd',cf=cf)

  if os.path.isfile(fname):
    err='RIVERS file already exists'
    isFatal=False
  else:
    nforec=opt.n_pred(cf)
    import get_rivers
    date1=None
    if FA=='f': date1=dateu.parse_date(dateu.next_date(date,nforec))
    try: err=get_rivers.gen_frc(fname,grd,date,date1)
    except: err='ERROR creating rivers file'
    if err: isFatal=True

  return err, isFatal, fname

def clean_ini(cf,date1,date2,FA='a',nest=0,clean=False,quiet=False,output=sys.stdout):
  '''Create a text log file with the current ini links.
  If the ini file is a link it can be unlinked; if it is a regular file
  it is not removed!
  '''
  def read_log(f):
    out=cb.odict()
    if os.path.isfile(f):
      L=open(log).readlines()
      for l in L:
        tmp=l.split(' ',1)
        sdate=tmp[0]
        scontents=tmp[1].rstrip().split(' + ')
        out[tmp[0]]=scontents

    return out

  def gen_log(f,L):
    i=open(f,'w')
    keys=L.keys()
    keys.sort()
    for d in keys:
      scontents=sjoin(L[d],' + ')
      i.write('%s %s\n' % (d,scontents))

    i.close()

  def add2log(f,add):
    L=read_log(f)
    L0=L.copy()
    sdate=add[0]
    contents=add[1].rstrip()
    if L.has_key(sdate) and contents not in L[sdate]:
      L[sdate]+=[contents]
      #if not quiet: print >>output,'  +'+sdate+' '+contents
    elif not L.has_key(sdate):
      L[sdate]=[contents]
      #if not quiet: print >>output,'  '+sdate+' '+contents

    if L!=L0:
      gen_log(f,L)
      return True
    else:
      return False # file has not changed

  files=[]
  date=date1
  log='ini_log_%s_%s.txt' % (date1,date2)
  p=opt.pathof(cf,'inputs','ini')
  log=os.path.join(p,log)
  if not quiet: print >>output,'Creating/updating ini log %s' % log

  HasChanged=False
  while date<=date2:
    f=opt.nameof('in','ini',date,FA,nest,cf)
    if os.path.islink(f):
      src=os.readlink(f)
      add=(date,'%s --> %s' % (f,src))
      hasChanged=add2log(log,add)
      HasChanged=HasChanged or hasChanged
      if clean:
        # remove link:
        if not quiet: print >>output,'removing %s' % f
        os.remove(f)
    elif os.path.isfile(f):
      hs=cb.hsize(os.path.getsize(f))
      ssize='%s %s' % (str(hs[0]),hs[1])
      add=(date,'%s --> %s' % (f,ssize))
      hasChanged=add2log(log,add)
      HasChanged=HasChanged or hasChanged
      if not quiet: print >>output,date,' keeping ',f,' ',hs[0],hs[1]
    else: print >>output,date,' no file'

    date=dateu.next_date(date,1)

  if not quiet:
    if HasChanged: print >>output,' file has changed'
    else: print >>output,' file has not changed'

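# Hedged sketch of the ini log format written/parsed by gen_log/read_log above: one
# line per date, entries joined by ' + ', each entry '<ini file> --> <link target or
# size>'. The paths and the '1.2 Gb' size string below are illustrative only.
#
#   20120115 /ops/in/ini_20120115_a.nc --> /ops/out/rst_20120114_a.nc
#   20120116 /ops/in/ini_20120116_a.nc --> 1.2 Gb
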
def oof(cf,plconf,date=False,last_date=False,FA='a',env=False):
  # start email notifications service:
  emailInfo=opt.email_info(cf=cf)
  sendEmail=emailInfo['send']
  #if sendEmail: sys.stdout=opt.Redirect()

  env_vars(env)
  flags=opt.flags_info(cf)

  if date: date=dateu.parse_date(date)
  if last_date: last_date=dateu.parse_date(last_date)

  if not date:
    # find date-1 for prediction:
    date,file=find_last(type='rst',cf=cf)
    if not date:
      on_error(sendEmail,'ERROR (%s): Cannot find previous file'%FA,emailInfo)
      return
    else:
      print 'Last date = %s from file %s' % (date,file)
      rout=opt.nameof('out','rout',date=date,FA='a',cf=cf)
      if is_roms_out_ok(rout):
        print 'Previous roms out is ok: %s' % rout
      else:
        on_error(sendEmail,'ERROR (%s): Last run is not ok %s : %s' % (FA,date,rout),emailInfo)
        return
  else:
    date=dateu.next_date(date,-1)

  # read dates:
  start_date,end_date=opt.dates_info(cf)
  if last_date: end_date=dateu.next_date(last_date,+1)

  while date >= start_date and date < end_date:
    # read dates again, a possible update may occur:
    start_date,end_date=opt.dates_info(cf)
    if last_date: end_date=dateu.next_date(last_date,+1)

    date=dateu.next_date(date)

    # check if already run for that date,
    # ie, check for rst and check if roms_out is ok:
    rst=opt.nameof('out','rst',date=date,FA=FA,cf=cf)
    rout=opt.nameof('out','rout',date=date,FA=FA,cf=cf)
    if os.path.isfile(rst):
      print 'Found rst file for %s: %s' % (date,rst)
      if os.path.isfile(rout):
        if is_roms_out_ok(rout):
          print '  previous roms out is ok: %s' % rout
        else:
          on_error(sendEmail,'ERROR (%s): Previous roms out is NOT ok: %s' % (FA,rout),emailInfo)
          break
      else:
        print '  roms out for %s not found: NOT CHECKED' % date
    else:
      print '\nModel will start from %s' % date

      # check for atm data for current simulation:
      if flags['atmfrc'] or flags['atmblk']: atmStat=check_atm(date,FA,cf=cf)
      else: atmStat=True

      ## wait for rst in case of fa==F:
      ##if FA=='f': rstStat=check_rst(date,cf=cf)
      ##else: rstStat=True
      rstStat=check_rst(date,cf=cf)

      # check for boundary data for current simulation:
      if flags['clmbry']:
        # this step may take forever!! just let us believe the parent model is available
        #bcStat=check_bc(date,FA,cf=cf)
        bcStat=True
      else: bcStat=True

      now=time.strftime("%Y-%m-%d %H:%M:%S +0",time.gmtime())
      if (not atmStat is False) and (not rstStat is False) and (not bcStat is False):
        rout,dt,runErr=run(date,FA,cf=cf)
        now=time.strftime("%Y-%m-%d %H:%M:%S +0",time.gmtime())

        # check if run was ok:
        if is_roms_out_ok(rout):
          msg='NO error %s %s'%(date,FA)
          Msg=' Run %s %s finished ok [%s] dt=%6.2f' % (date,FA,now,dt)
          print Msg

          # make plots:
          if flags['plots']:
            err,savenames=op_plot.op_plt(cf,plconf,date,FA)
            if not all(e=='' for e in err):
              msg+=' --> ERROR plotting'
              print ' ERROR plotting : ',
              for e in err: print e

            if not all(e=='' for e in savenames):
              for sv in savenames: print ' Saved plot '+sv

        elif runErr:
          on_error(sendEmail,'ERROR (%s): Run %s returned the error msg: %s' % (FA,date,runErr),emailInfo)
          break
        else:
          on_error(sendEmail,'ERROR (%s): Run %s finished with ERROR [%s] dt=%6.2f' % (FA,date,now,dt),emailInfo)
          break

      elif atmStat is False:
        Msg='ERROR (%s): Run %s cannot run (atm data missing) ERROR [%s]' % (FA,date,now)
        if FA=='a':
          on_error(sendEmail,Msg,emailInfo)
          break
        else:
          msg='ERROR: atm data missing'
          print Msg

      elif rstStat is False:
        msg='ERROR: rst data missing'
        Msg='ERROR (%s): Run %s cannot run (rst data missing) ERROR [%s]' % (FA,date,now)
        print Msg

      print '\n'
      if sendEmail: send_email.send(emailInfo['dest'],Msg,msg)

def plt_wind_rose(conf, plconf, date, FA='a', nest=0, **kargs):
  err = ''
  fig = []
  info = []

  ifig = kargs.get('ifig', 0)
  quiet = kargs.get('quiet', 0)
  place = kargs.get('place', False)
  day = kargs.get('day', 'all')

  date = dateu.parse_date(date)

  # find input files:
  args = {'cf': conf, 'date': date, 'FA': FA, 'nest': nest}
  atm = opt.nameof('in', 'blk', **args)
  grd = opt.nameof('in', 'grd', **args)
  if not os.path.isfile(atm):
    err = 'ATM file not found (%s)' % atm
    return err, fig, info

  if not os.path.isfile(grd):
    err = 'Grid file not found (%s)' % grd
    return err, fig, info

  # get conf data and places:
  Data, err = opt.get_plconf(plconf, 'WINDR')
  Places, err = opt.get_plconf(plconf, 'WINDR_PLACES')

  figpos      = Data['fig_size'][ifig]
  axpos       = Data['ax_pos'][ifig]
  fontsize    = Data['fontsize'][ifig]
  linewidth   = Data['linewidth'][ifig]
  legType     = Data['legtype'][ifig]
  intensities = Data['intensities'][ifig]
  percentages = Data['percentages'][ifig]
  Title       = Data['title'][ifig]
  labels      = Data['labels'][ifig]

  places = {}
  for k in Places.keys(): places[k] = Places[k][ifig]

  # get data from atm file:
  f = roms.Blk(atm, grd)
  for k in places.keys():
    if place and place != k.strip('_'): continue
    lon, lat = places[k][:2]

    Ndays = int(np.ceil(f.tdays[-1] - f.tdays[0]))
    if day == 'all': Days = range(Ndays)
    else: Days = [day]

    for Day in Days:
      time, u, v = f.get('wind_ts', lon=lon, lat=lat, day=Day)

      # calc angle and intensity:
      D = np.arctan2(v, u) * 180 / np.pi
      F = np.sqrt(u**2 + v**2)

      # wind_rose:
      args = {'fontsize': fontsize, 'linewidth': linewidth, 'figpos': figpos,
              'axpos': axpos, 'legtype': legType, 'labels': labels}

      # title:
      if Title:
        placeStr = places[k][2]  # k.strip('_').replace('_',' ')
        simpleTitle = 1
        rdate = date.strftime('%d-%m-%Y')
        title = '%s %s %s %d' % (placeStr, rdate, FA, Day)
        if simpleTitle:  # simpler version of title:
          if FA == 'f':  # forecast date:
            rdate = dateu.next_date(date, Day)
            rdate = rdate.strftime('%d-%m-%Y')

          title = '%s %s' % (placeStr, rdate)

        if FA == 'f': title = title + ' (forec)'

        args['labtitle'] = title
        args['lablegend'] = 'wind m/s'

      tmp = wr.wind_rose(D, F, di=intensities, ci=percentages, **args)
      fig += [tmp]
      info += [(k, Day)]

  return err, fig, info

def update_wind_blended2(fname, datapaths, **kargs):
  """
  On days without blended data, quikscat data is used instead.
  """
  from okean.datasets import quikscat
  from okean.datasets import blended_wind
  a = blended_wind.WINDData(datapaths[0])
  b = quikscat.WINDData(datapaths[1])

  time = netcdf.nctime(fname, "time")
  date0 = dts.next_date(time[0], -1)
  date1 = dts.next_date(time[-1], +2)

  data = a.data(date0, date1)

  # limit area ... otherwise the quikscat interp will be very slow!
  grd = netcdf.fatt(fname, "grd_file")
  import os
  if not os.path.isfile(grd): grd = kargs["grd"]
  cond, inds = rt.grid_vicinity(grd, data["x"], data["y"], margin=5, rect=True, retinds=True)
  i1, i2, j1, j2 = inds
  for d in data.keys():
    if d == "x": data[d] = data[d][i1:i2]
    elif d == "y": data[d] = data[d][j1:j2]
    else: data[d] = data[d][j1:j2, i1:i2]

  # check for missing days:
  time0 = data.keys()
  x0 = data["x"]
  y0 = data["y"]
  x0, y0 = np.meshgrid(x0, y0)
  time0.remove("x")
  time0.remove("y")

  out = cb.odict()
  out["x"] = x0
  out["y"] = y0
  info = ""
  qs_ij_limits_done = False
  for d in dts.drange(date0, date1):
    found = 0
    for t in time0:
      if (t.year, t.month, t.day) == (d.year, d.month, d.day):
        print "==> blended : ", t
        out[t] = data[t]
        found = 1

    if not found:  # use quikscat:
      print "==> quikscat : ", d.strftime("%Y-%m-%d")
      tmp = b.data(d, dts.next_date(d))
      if not tmp.has_key("x"): continue
      x, y = tmp["x"], tmp["y"]
      x, y = np.meshgrid(x, y)

      # reduce qs data:
      if not qs_ij_limits_done:
        i1, i2, j1, j2 = calc.ij_limits(x, y, [x0.min(), x0.max()], [y0.min(), y0.max()])
        qs_ij_limits_done = True

      x = x[j1:j2, i1:i2]
      y = y[j1:j2, i1:i2]
      tmp[tmp.keys()[0]] = tmp[tmp.keys()[0]][j1:j2, i1:i2]

      print "  griddata u"
      u = calc.griddata(x, y, tmp[tmp.keys()[0]].real, x0, y0)
      print "  griddata v"
      v = calc.griddata(x, y, tmp[tmp.keys()[0]].imag, x0, y0)
      out[tmp.keys()[0]] = u + 1.0j * v
      info += "#" + d.strftime("%Y%m%d")

  new_wind_info = "blended+quikscat at days: " + info
  update_wind(fname, out, new_wind_info, **kargs)

def plt_wind(conf, plconf, date, FA='a', nest=0, **kargs):
  err = ''
  fig = False
  info = ''

  ifig = kargs.get('ifig', 0)
  day = kargs.get('day', 0)
  quiet = kargs.get('quiet', 0)

  time = day
  date = dateu.parse_date(date)

  # find input files:
  args = {'cf': conf, 'date': date, 'FA': FA, 'nest': nest}
  atm = opt.nameof('in', 'blk', **args)
  grd = opt.nameof('in', 'grd', **args)
  if not os.path.isfile(atm):
    err = 'ATM file not found (%s)' % atm
    return err, fig, info

  if not os.path.isfile(grd):
    err = 'Grid file not found (%s)' % grd
    return err, fig, info

  Data, err = opt.get_plconf(plconf, 'WIND')
  dcurr = Data['dcurr'][ifig]
  lcurr = Data['lcurr'][ifig]
  scurr = Data['scurr'][ifig]
  clim  = Data['clim'][ifig]
  tind  = Data['time'][ifig]

  x = netcdf.use(grd, 'lon_rho')
  y = netcdf.use(grd, 'lat_rho')
  wtime = netcdf.nctime(atm, 'time')
  cnd = (wtime >= date + datetime.timedelta(days=day)) & (wtime < date + datetime.timedelta(days=day + 1))
  u = netcdf.use(atm, 'Uwind', time=cnd)
  v = netcdf.use(atm, 'Vwind', time=cnd)
  if tind == 'dailyMean':
    u = u.mean(0)
    v = v.mean(0)
    sdate = wtime[cnd][0]  # for title... 1st day 00h is expected to be 1st date,
                           # or model should not run!
  else:  # tind of some day, ex: tind 0 from forec day 3
    u = u[tind]
    v = v[tind]
    sdate = wtime[cnd][tind]

  if day > len(u) - 1:
    err = 'Invalid day %d (max=%d)' % (day, len(u) - 1)
    return err, fig, info

  # plot grid:
  proj, fig, ax = plt_grid(plconf, grd, ifig)

  # no mask on land:
  mask = np.zeros(u.shape, 'bool')
  mask[::dcurr[0], ::dcurr[1]] = True
  xm, ym = proj(x, y)
  s = np.sqrt(u**2 + v**2)
  q = pl.quiver(xm[mask], ym[mask], u[mask], v[mask], s[mask], scale=scurr, zorder=100)
  pl.clim(clim[0], clim[1])

  def add_colorbar(handle, **args):
    ax = pl.gca()
    Data, err = opt.get_plconf(plconf, 'AXES')
    cbpos   = Data['cbpos'][ifig]
    cbbgpos = Data['cbbgpos'][ifig]
    cbbgc   = Data['cbbgcolor'][ifig]
    cbbga   = Data['cbbgalpha'][ifig]
    cblab   = Data['cblabel'][ifig]

    # colorbar bg axes:
    if cbbgpos:
      rec = pl.axes((cbpos[0] - cbpos[2] * cbbgpos[0], cbpos[1] - cbbgpos[2] * cbpos[3],
                     cbpos[2] * (1 + cbbgpos[0] + cbbgpos[1]),
                     cbpos[3] * (1 + cbbgpos[2] + cbbgpos[3])),
                    axisbg=cbbgc, frameon=1)

      rec.patch.set_alpha(cbbga)
      rec.set_xticks([])
      rec.set_yticks([])
      for k in rec.axes.spines.keys():
        rec.axes.spines[k].set_color(cbbgc)
        rec.axes.spines[k].set_alpha(cbbga)

    # colorbar:
    if cbpos:
      cbax = fig.add_axes(cbpos)
      if cbpos[2] > cbpos[3]: orient = 'horizontal'
      else: orient = 'vertical'
      cb = pl.colorbar(handle, cax=cbax, orientation=orient, drawedges=0, **args)
      pl.axes(ax)

      # colorbar label:
      cb.set_label(r'Wind Speed [m s$^{\rm{-1}}$]')

  def add_currkey(handle):
    pos = Data['kcurrpos'][ifig]
    if pos:
      pl.quiverkey(handle, pos[0], pos[1], lcurr, '%s m/s' % str(lcurr),
                   labelpos='S', coordinates='axes')

  add_colorbar(q)
  add_currkey(q)

  # title:
  Title, err = opt.get_plconf(plconf, 'AXES', 'title')
  if Title[ifig]:
    simpleTitle = 1
    rdate = date.strftime('%d-%m-%Y')
    title = 'wind %s %s %d' % (rdate, FA, day)
    if simpleTitle:  # simpler version of title:
      if FA == 'f':  # forecast date:
        rdate = dateu.next_date(date, day)
        rdate = rdate.strftime('%d-%m-%Y')

      title = 'wind %s' % (rdate)

    if FA == 'f': title = title + ' (forec)'

    pl.title(title)

  # logo:
  if ifig == 0:
    im = os.path.join(os.path.dirname(__file__), 'logo_INOCAR.png')
    i = pl.imread(im)
    h, w = i.shape[:2]
    rx = .12
    W = (proj.xmax - proj.xmin) * rx
    H = W * h / w
    l = proj.xmax
    #pl.fill([proj.xmax-W, proj.xmax, proj.xmax, proj.xmax-W],
    #        [proj.ymin, proj.ymin, proj.ymin+2.8*H, proj.ymin+2.8*H],
    #        '#500000',alpha=0.25,ec='none')
    ax.imshow(i, extent=(proj.xmax * .98 - W, proj.xmax * .98,
                         proj.ymin + H * .1, proj.ymin + H * 1.1), zorder=1e3)
    #pl.text(proj.xmax-W/2., proj.ymin+2.2*H,'OOF',
    #        fontdict={'size':14,'family':'serif'},
    #        color='#500000',ha='center',weight='bold')
    pl.text(proj.xmax * .8, proj.ymax * (-.1), sdate.strftime("%d %b %Y"),
            #pl.text(proj.xmax*.62, proj.ymax*.93,sdate.strftime("%d %b %Y"),
            fontdict={'size': 13, 'family': 'monospace'}, ha='center')
    # change date format if tind is not daily mean, ie, add hour, etc

    if FA == 'f':
      s = 'Pronostico desde %s' % date.strftime("%d %b %Y")
      pl.text(proj.xmax * .8, proj.ymax * (-.15), s,  ## this is outside the proj
              #pl.text(proj.xmax-W/2., proj.ymin+1.1*H,s, ## this is inside the proj
              fontdict={'fontsize': 10}, ha='center')
  # logo.

  # lims change in some mpl versions !!
  pl.gca().axis([proj.xmin, proj.xmax, proj.ymin, proj.ymax])

  return err, fig, info

def op_plt_hslice(conf, plconf, date, FA, nest=0, **kargs):
  closefig = kargs.get('closefig', True)
  clearfig = kargs.get('clearfig', True)
  save     = kargs.get('save', True)
  Varname  = kargs.get('vname', False)
  Depth    = kargs.get('depth', False)

  date = dateu.parse_date(date)
  Err = []
  Out = []

  pltpath = opt.nameof('out', 'plots', cf=conf)
  Figure, err = opt.get_plconf(plconf, 'FIGURE')
  Hslices, err = opt.get_plconf(plconf, 'HSLICES')
  Title, err = opt.get_plconf(plconf, 'AXES', 'title')

  def iterv(v):
    try: len(v)
    except: v = [v]
    return v

  for i in range(len(Figure.values()[0])):  # loop figures
    Varnames = Hslices['varnames'][i].split(',')
    Varnames = [s.strip() for s in Varnames]
    depths  = Hslices['depths'][i]
    addcurr = Hslices['addcurr'][i]
    # clims = Hslices['clim'][i]

    depths  = iterv(depths)
    addcurr = iterv(addcurr)
    # clims = iterv(clims)

    kargs = {}
    kargs['ifig'] = i
    kargs['type'] = Hslices['input'][i]
    #kargs['time']=Inputs['time'][i]
    TIMES = Hslices['time'][i]
    kargs['dcurr'] = Hslices['dcurr'][i]
    kargs['scurr'] = Hslices['scurr'][i]
    kargs['lcurr'] = Hslices['lcurr'][i]

    if Varname: varnames = [Varname]
    else: varnames = Varnames

    for var in varnames:  # loop variables
      var = var.strip()
      nv = Varnames.index(var)
      kargs['cmap'] = Hslices['cmap'][i].split(',')[nv].strip()
      kargs['norm'] = Hslices['norm'][i].split(',')[nv].strip()
      kargs['var'] = var

      vdepths = depths[nv]
      try: iter(vdepths)
      except: vdepths = [vdepths]
      if Depth: vdepths = [Depth]

      nz = -1
      for z in vdepths:  # loop depths
        nz += 1
        if z in ('s', 'surf', 'surface') or z >= 0:
          kargs['ind'] = z
          kargs['slice'] = 'k'
        else:
          kargs['ind'] = z
          kargs['slice'] = 'z'

        cl = opt.get_clims(date, var, z, plconf)[i]
        kargs['clim'] = cl

        for addc in addcurr:  # add/no currents
          kargs['currents'] = addc

          # loop over time:
          if TIMES == 'all':
            if FA == 'a': times = [-1]
            else: times = range(int(opt.get_conf(conf, 'PRED', 'npred')[0]))
          else: times = [TIMES]

          for it in times:
            kargs['time'] = it

            e, fig, info = plt_hslice(conf, plconf, date, FA, nest, **kargs)
            Err += [e]

            if not e:
              if var: svar = var
              else: svar = ''

              if addc: scurr = 'curr'
              else: scurr = ''

              if info['hasz']: sslice = kargs['slice'] + '_' + str(kargs['ind'])
              else: sslice = ''

              if date.hour > 0: sdate = date.strftime('%Y%m%d_%H')
              else: sdate = date.strftime('%Y%m%d')

              savename = '%s_%s_n%d_%s_%d_%s_%s_%s_%s' % (sdate, FA, nest, kargs['type'],
                          kargs['time'], svar, scurr, sslice, Figure['tag'][i])

              if Title[i]:
                simpleTitle = 1
                rdate = date.strftime('%d-%m-%Y')
                if sslice: sslice = sslice[2:]
                title = '%s#%s#n%d#%s#%d#%s#%s#%s' % (rdate, FA, nest, kargs['type'],
                         kargs['time'], svar, scurr, sslice)

                if simpleTitle:  # simpler version of title:
                  if FA == 'f':  # forecast date:
                    rdate = dateu.next_date(date, kargs['time'])
                    rdate = rdate.strftime('%d-%m-%Y')

                  title = '%s#%s#%s#%s' % (rdate, svar, scurr, sslice)

                if FA == 'f': title = title + ' (forec)'

                while 1:
                  if title.find('##') > -1: title = title.replace('##', '#')
                  else: break

                title = title.replace('#', ' ')
                pl.title(title)

              while 1:
                if savename.find('__') > -1: savename = savename.replace('__', '_')
                else: break

              savename = savename.strip('_')

              if save:
                Y, M, D = date.year, date.month, date.day
                Ydest = os.path.join(pltpath, '%s' % Y)
                if not os.path.isdir(Ydest): os.mkdir(Ydest)

                MDdest = os.path.join(Ydest, '%02d_%02d' % (M, D))
                if not os.path.isdir(MDdest): os.mkdir(MDdest)

                savename = os.path.join(MDdest, savename + '.' + Figure['extension'][i])
                Out += [savename]
                pl.savefig(savename, dpi=pl.gcf().dpi)
                #pl.savefig(savename,dpi=300)

              if clearfig: fig.clear()
              if closefig: pl.close(fig)

  return Err, Out
