def download_range(self, date0, date1, quiet=True):
    '''Download analysis data for every day in the interval [date0 .. date1['''
    # end date excluded, matching the docstring's half-open interval
    for day in dateu.drange(date0, date1, inclast=False):
        self.download_fast(day, FA='a', del1=True, checkinv=True,
                           quiet=quiet, prevopt=True)
def __files(self,date0,date1=False,FA='a',nforec='auto'):
    '''
    Used by files_analysis and files_forecast

    Builds the list of local file names covering date0 (or the range
    [date0..date1]), prefixed with the 00h file of the previous day
    (taken as the last analysis file of that day).

    Returns:
      files  : list of file paths (empty if no daily files were found)
      time   : date parsed from each file name via get_date
      isbest : per file, True if a regular file, False if a symlink,
               None if the file does not exist on disk
    '''
    # a forecast run starts at a single date, so a date range is meaningless
    if FA=='f': date1=False
    # only pass nforec on when the caller overrides the default
    if nforec=='auto': args={}
    else: args={'nforec':nforec}
    a=GFSDownload(basefolder=self.basefolder,**args)
    if date1 is False: dates=[date0]
    else: dates=dateu.drange(date0,date1)
    files=[]
    time=[]
    isbest=[]
    # first file, 00h data (last of previous day)
    datePrev=dateu.next_date(date0,-1)
    file0=a.daily_files(datePrev,FA='a')[1][-1]['name']
    for d in dates:
        Src,Dest=a.daily_files(d,FA=FA)
        for dest in Dest: files+=[dest['name']]
    # only prepend the previous-day file when something was found
    if files: files=[file0]+files
    for f in files:
        time+=[get_date(f)]
        if os.path.isfile(f): isbest+=[not os.path.islink(f)]
        else: isbest+=[None]
    return files,time,isbest
def __files(self, date0, date1=False, FA='a', nforec='auto'):
    '''Used by files_analysis and files_forecast'''
    # forecasts start from a single date; ignore any range end
    if FA == 'f':
        date1 = False

    kw = {} if nforec == 'auto' else {'nforec': nforec}
    dl = GFSDownload(basefolder=self.basefolder, **kw)

    dates = [date0] if date1 is False else dateu.drange(date0, date1)

    # 00h data comes from the last analysis file of the previous day
    prev_day = dateu.next_date(date0, -1)
    first = dl.daily_files(prev_day, FA='a')[1][-1]['name']

    files = []
    for day in dates:
        src, dest = dl.daily_files(day, FA=FA)
        files.extend(entry['name'] for entry in dest)

    if files:
        files = [first] + files

    time = []
    isbest = []
    for f in files:
        time.append(get_date(f))
        if os.path.isfile(f):
            isbest.append(not os.path.islink(f))
        else:
            isbest.append(None)

    return files, time, isbest
def files(self, date0, date1=False):
    '''Build remote NARR grib file urls and their times for [date0..date1]'''
    date0 = dateu.parse_date(date0)
    if not date1:
        # default to a single day
        date1 = date0 + datetime.timedelta(1)
    else:
        date1 = dateu.parse_date(date1)

    dates = dateu.drange(date0, date1, True)
    last = dates[-1]

    files = []
    time = []
    for d in dates:
        # the final day contributes only its 00h record
        hours = [0] if d == last else range(0, 24, narrdt)
        for hr in hours:
            fname = 'narr-a_221_%s_%02d00_000.grb' % (d.strftime('%Y%m%d'), hr)
            files.append(os.path.join(baseurl, d.strftime('%Y%m'),
                                      d.strftime('%Y%m%d'), fname))
            time.append(d + datetime.timedelta(hr / 24.))

    return files, np.array(time)
def decompress_year(self, year):
    '''Decompress every daily file of the given year.'''
    first, last = (year, 1, 1), (year + 1, 1, 1)
    for day in dateu.drange(first, last):
        self.decompress_day(day)
def download_year(self, year):
    '''Download every daily file of the given year.'''
    first, last = (year, 1, 1), (year + 1, 1, 1)
    for day in dateu.drange(first, last):
        self.download_day(day)
def update_wind_blended2(fname, datapaths, **kargs):
    '''
    In days without blended data will try to use quikscat data

    fname     : target netcdf forcing file (read for its time axis and
                'grd_file' attribute)
    datapaths : (blended_wind_path, quikscat_path)
    kargs     : may supply 'grd' when the file's grd_file is absent on
                disk; remaining kargs are forwarded to update_wind
    '''
    from okean.datasets import quikscat
    from okean.datasets import blended_wind
    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    time = netcdf.nctime(fname, 'time')
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit are... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, 'grd_file')
    import os
    if not os.path.isfile(grd): grd = kargs['grd']
    cond, inds = rt.grid_vicinity(grd, data['x'], data['y'],
                                  margin=5, rect=True, retinds=True)
    i1, i2, j1, j2 = inds
    # list() so we can reassign values while iterating the keys
    for d in list(data.keys()):
        if d == 'x':
            data[d] = data[d][i1:i2]
        elif d == 'y':
            data[d] = data[d][j1:j2]
        else:
            data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    # py3 fix: dict.keys() is a view with no remove(); materialize a list
    time0 = list(data.keys())
    x0 = data['x']
    y0 = data['y']
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove('x')
    time0.remove('y')

    out = OrderedDict()
    out['x'] = x0
    out['y'] = y0
    info = ''
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                print('==> blended : ', t)
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print('==> quikscat : ', d.strftime('%Y-%m-%d'))
            tmp = b.data(d, dts.next_date(d))
            # py3 fix: dict.has_key was removed; use "in"
            if 'x' not in tmp: continue
            x, y = tmp['x'], tmp['y']
            x, y = np.meshgrid(x, y)

            # reduce qs data (same ij limits reused for all days):
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y,
                                                [x0.min(), x0.max()],
                                                [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            # py3 fix: keys() view is not subscriptable; take the first key once.
            # NOTE(review): assumes the first key is the wind field (not
            # 'x'/'y') — depends on insertion order in b.data; confirm.
            key = list(tmp.keys())[0]
            tmp[key] = tmp[key][j1:j2, i1:i2]

            print(' griddata u')
            u = calc.griddata(x, y, tmp[key].real, x0, y0)
            print(' griddata v')
            v = calc.griddata(x, y, tmp[key].imag, x0, y0)
            out[key] = u + 1.j * v
            info += '#' + d.strftime('%Y%m%d')

    new_wind_info = 'blended+quikscat at days: ' + info
    update_wind(fname, out, new_wind_info, **kargs)
def update_wind_blended2(fname, datapaths, **kargs):
    """
    In days without blended data will try to use quikscat data

    fname     : target netcdf forcing file (read for its time axis and
                'grd_file' attribute)
    datapaths : (blended_wind_path, quikscat_path)
    kargs     : may supply 'grd' when the file's grd_file is absent on
                disk; remaining kargs are forwarded to update_wind
    """
    from okean.datasets import quikscat
    from okean.datasets import blended_wind
    a = blended_wind.WINDData(datapaths[0])
    b = quikscat.WINDData(datapaths[1])

    time = netcdf.nctime(fname, "time")
    date0 = dts.next_date(time[0], -1)
    date1 = dts.next_date(time[-1], +2)

    data = a.data(date0, date1)

    # limit are... otherwise, quikscat interp will be very slow!
    grd = netcdf.fatt(fname, "grd_file")
    import os
    if not os.path.isfile(grd):
        grd = kargs["grd"]
    cond, inds = rt.grid_vicinity(grd, data["x"], data["y"],
                                  margin=5, rect=True, retinds=True)
    i1, i2, j1, j2 = inds
    # list() so we can reassign values while iterating the keys
    for d in list(data.keys()):
        if d == "x":
            data[d] = data[d][i1:i2]
        elif d == "y":
            data[d] = data[d][j1:j2]
        else:
            data[d] = data[d][j1:j2, i1:i2]

    # check for missing days:
    # py3 fix: dict.keys() is a view with no remove(); materialize a list
    time0 = list(data.keys())
    x0 = data["x"]
    y0 = data["y"]
    x0, y0 = np.meshgrid(x0, y0)
    time0.remove("x")
    time0.remove("y")

    out = cb.odict()
    out["x"] = x0
    out["y"] = y0
    info = ""
    qs_ij_limits_done = False
    for d in dts.drange(date0, date1):
        found = 0
        for t in time0:
            if (t.year, t.month, t.day) == (d.year, d.month, d.day):
                # py3 fix: print statement -> print function
                print("==> blended : ", t)
                out[t] = data[t]
                found = 1

        if not found:  # use quikscat:
            print("==> quikscat : ", d.strftime("%Y-%m-%d"))
            tmp = b.data(d, dts.next_date(d))
            # py3 fix: dict.has_key was removed; use "in"
            if "x" not in tmp:
                continue
            x, y = tmp["x"], tmp["y"]
            x, y = np.meshgrid(x, y)

            # reduce qs data (same ij limits reused for all days):
            if not qs_ij_limits_done:
                i1, i2, j1, j2 = calc.ij_limits(x, y,
                                                [x0.min(), x0.max()],
                                                [y0.min(), y0.max()])
                qs_ij_limits_done = True

            x = x[j1:j2, i1:i2]
            y = y[j1:j2, i1:i2]
            # py3 fix: keys() view is not subscriptable; take the first key once.
            # NOTE(review): assumes the first key is the wind field (not
            # 'x'/'y') — depends on insertion order in b.data; confirm.
            key = list(tmp.keys())[0]
            tmp[key] = tmp[key][j1:j2, i1:i2]

            print(" griddata u")
            u = calc.griddata(x, y, tmp[key].real, x0, y0)
            print(" griddata v")
            v = calc.griddata(x, y, tmp[key].imag, x0, y0)
            out[key] = u + 1.0j * v
            info += "#" + d.strftime("%Y%m%d")

    new_wind_info = "blended+quikscat at days: " + info
    update_wind(fname, out, new_wind_info, **kargs)
def update_wind_blended2(fname,datapaths,**kargs):
    '''
    In days without blended data will try to use quikscat data

    fname     : target netcdf forcing file (read for its time axis and
                'grd_file' attribute)
    datapaths : (blended_wind_path, quikscat_path)
    kargs     : may supply 'grd' when the file's grd_file is absent on
                disk; remaining kargs are forwarded to update_wind
    '''
    from okean.datasets import quikscat
    from okean.datasets import blended_wind
    a=blended_wind.WINDData(datapaths[0])
    b=quikscat.WINDData(datapaths[1])

    time=netcdf.nctime(fname,'time')
    date0=dts.next_date(time[0],-1)
    date1=dts.next_date(time[-1],+2)

    data=a.data(date0,date1)

    # limit are... otherwise, quikscat interp will be very slow!
    grd=netcdf.fatt(fname,'grd_file')
    import os
    if not os.path.isfile(grd): grd=kargs['grd']
    cond,inds=rt.grid_vicinity(grd,data['x'],data['y'],margin=5,rect=True,retinds=True)
    i1,i2,j1,j2=inds
    # list() so values can be reassigned while iterating the keys
    for d in list(data.keys()):
        if d == 'x': data[d]=data[d][i1:i2]
        elif d == 'y': data[d]=data[d][j1:j2]
        else: data[d]=data[d][j1:j2,i1:i2]

    # check for missing days:
    # py3 fix: dict.keys() is a view with no remove(); materialize a list
    time0=list(data.keys())
    x0=data['x']
    y0=data['y']
    x0,y0=np.meshgrid(x0,y0)
    time0.remove('x')
    time0.remove('y')

    out=OrderedDict()
    out['x']=x0
    out['y']=y0
    info=''
    qs_ij_limits_done=False
    for d in dts.drange(date0,date1):
        found=0
        for t in time0:
            if (t.year,t.month,t.day)==(d.year,d.month,d.day):
                print('==> blended : ',t)
                out[t]=data[t]
                found=1

        if not found: # use quikscat:
            print('==> quikscat : ',d.strftime('%Y-%m-%d'))
            tmp=b.data(d,dts.next_date(d))
            # py3 fix: dict.has_key was removed; use "in"
            if 'x' not in tmp: continue
            x,y=tmp['x'],tmp['y']
            x,y=np.meshgrid(x,y)

            # reduce qs data (same ij limits reused for all days):
            if not qs_ij_limits_done:
                i1,i2,j1,j2=calc.ij_limits(x,y,[x0.min(),x0.max()],[y0.min(),y0.max()])
                qs_ij_limits_done=True

            x=x[j1:j2,i1:i2]
            y=y[j1:j2,i1:i2]
            # py3 fix: keys() view is not subscriptable; take the first key once.
            # NOTE(review): assumes the first key is the wind field (not
            # 'x'/'y') — depends on insertion order in b.data; confirm.
            key=list(tmp.keys())[0]
            tmp[key]=tmp[key][j1:j2,i1:i2]

            print(' griddata u')
            u=calc.griddata(x,y,tmp[key].real,x0,y0)
            print(' griddata v')
            v=calc.griddata(x,y,tmp[key].imag,x0,y0)
            out[key]=u+1.j*v
            info+='#'+d.strftime('%Y%m%d')

    new_wind_info='blended+quikscat at days: '+info
    update_wind(fname,out,new_wind_info,**kargs)
def decompress_year(self,year):
    '''Decompress each day of the given year, Jan 1 up to (not including) next Jan 1.'''
    for d in dateu.drange((year,1,1),(year+1,1,1)):
        self.decompress_day(d)
def download_year(self,year):
    '''Download each day of the given year, Jan 1 up to (not including) next Jan 1.'''
    for d in dateu.drange((year,1,1),(year+1,1,1)):
        self.download_day(d)