def check_bc(date, FA, wait=3600, cf=CONF):
    '''Check (and wait for) the parent model boundary data needed to run
    date's 'a'nalysis or 'f'orecast.

    Polls every `wait` seconds until the data is ready or until ~1.5 days
    have passed since `date`. Returns True when ready, otherwise the last
    value returned by get_hycom.is_ready.
    '''
    print('checking parent model...')
    import get_mercator as get_hycom

    nforec = opt.n_pred(cf)
    date1 = None
    if FA == 'f':
        # forecast runs also need parent data up to the last predicted day:
        date1 = dateu.parse_date(dateu.next_date(date, nforec))

    # check1: for analysis runs also require the first forecast record.
    ir = get_hycom.is_ready(date, date1, check1=FA == 'a')
    # bug fix: the original computed ir but then re-called is_ready WITHOUT
    # check1 in the test below, silently ignoring the check1 result:
    if ir:
        print('bc ok at check')
        return True
    else:
        now = dateu.currday()
        tdiff = dateu.date_diff(date, now)
        print('waiting for bc')
        # tdiff.days is an int, so this effectively waits while days < 2:
        while tdiff.days < 1.5:
            time.sleep(wait)
            sys.stdout.write('.')
            now = dateu.currday()
            tdiff = dateu.date_diff(date, now)
            cond = get_hycom.is_ready(date, date1, check1=FA == 'a')
            print(' bc file ready = ', cond, ' at ', now, tdiff)
            if cond:
                return True

    # timed out; report the last availability status:
    return get_hycom.is_ready(date, date1, check1=FA == 'a')
def check_rst(date, cf, wait=900):
    '''Check the restart file needed for date's forecast, and wait for it.

    The rst from the previous day's analysis must exist AND that run must
    have terminated cleanly (is_roms_out_ok), otherwise the rst may not
    contain the last record yet. Polls every `wait` seconds until ready or
    until ~1.5 days have passed since `date`.
    '''
    FA = 'a'
    date0 = dateu.next_date(date, -1)
    rst = opt.nameof('out', 'rst', date=date0, FA=FA, cf=cf)
    # check also if run terminated! The rst may not have the last record yet!
    rout = opt.nameof('out', 'rout', date=date0, FA=FA, cf=cf)

    if os.path.isfile(rst) and is_roms_out_ok(rout):
        return True
    else:
        now = dateu.currday()
        tdiff = dateu.date_diff(date, now)
        print('waiting for rst')
        while tdiff.days < 1.5:
            time.sleep(wait)
            sys.stdout.write('.')
            now = dateu.currday()
            tdiff = dateu.date_diff(date, now)
            cond = os.path.isfile(rst) and is_roms_out_ok(rout)
            print(' rst file ready = ', cond, ' at ', now, tdiff)
            if cond:
                return True

    # bug fix: the original final return checked only isfile(rst), dropping
    # the is_roms_out_ok(rout) condition required everywhere above:
    return os.path.isfile(rst) and is_roms_out_ok(rout)
def download_current(self, date=False, del1=True, quiet=True):
    '''
    Download files for today analysis and forecast.
    If date is not provided (the current day is used), also download
    analysis data of last self.ngetBefore days.

    Inputs:
      date, default is the current day
      del1, if the original version is grib1, the conversion to grib2
            is done and the version 1 files are removed if del1 is True
      quiet, print info flag
    '''
    if not date:
        date = dateu.currday()
    else:
        # NOTE: side effect — an explicit date permanently resets
        # self.ngetBefore to 1 (just get the selected day!)
        self.ngetBefore = 1

    for i in range(self.ngetBefore):
        day = dateu.next_date(date, -i, samefmt=False)
        if not quiet:
            # print() for consistency with the rest of the file
            # (original used a py2 print statement here):
            print('Downloading GFS files for date ' + day.strftime('%Y%m%d'))

        if i == 0:
            # download files for analysis and forecast:
            self.download_fast(day, FA='a', del1=del1, checkinv=True,
                               quiet=quiet, prevopt=False)
            self.download_fast(day, FA='f', del1=del1, checkinv=True,
                               quiet=quiet, prevopt=False)
        else:
            # download any missing analysis file from prev days:
            self.download_fast(day, FA='a', del1=del1, checkinv=False,
                               quiet=quiet, prevopt=True)
def download_current(self, date=False, del1=True, quiet=True):
    '''
    Download files for today analysis and forecast.
    If date is not provided (the current day is used), also download
    analysis data of last self.ngetBefore days.

    Inputs:
      date, default is the current day
      del1, if the original version is grib1, the conversion to grib2
            is done and the version 1 files are removed if del1 is True
      quiet, print info flag
    '''
    if date:
        # an explicit date restricts the download to that single day:
        self.ngetBefore = 1
    else:
        date = dateu.currday()

    for back in range(self.ngetBefore):
        day = dateu.next_date(date, -back, samefmt=False)
        if not quiet:
            print('Downloading GFS files for date ' + day.strftime('%Y%m%d'))

        if back:
            # older days: fetch only missing analysis files
            self.download_fast(day, FA='a', del1=del1, checkinv=False,
                               quiet=quiet, prevopt=True)
        else:
            # most recent day: both analysis and forecast
            self.download_fast(day, FA='a', del1=del1, checkinv=True,
                               quiet=quiet, prevopt=False)
            self.download_fast(day, FA='f', del1=del1, checkinv=True,
                               quiet=quiet, prevopt=False)
def log_name(cf, ccf):
    '''Return today's log file full path built from the LOG section of
    ccf, or False when log creation is disabled.'''
    today = dateu.currday(format='short', sep='')
    logconf, err = opt.get_conf(ccf, 'LOG', type=str)
    # NOTE(review): eval on a config string — acceptable for trusted
    # config files, unsafe if ccf may come from untrusted sources.
    if not eval(logconf['create']):
        return False
    folder = opt.pathof(cf, 'logpath')
    fname = logconf['logname'].replace('#TODAY#', today)
    return os.path.join(folder, fname)
def log_name(cf, ccf):
    '''Build the daily log file path from ccf's LOG section.

    Returns the full path, or False if the 'create' flag is off.'''
    today = dateu.currday(format='short', sep='')
    logconf, err = opt.get_conf(ccf, 'LOG', type=str)
    template = logconf['logname']
    # 'create' is a python-literal string like 'True'/'False'
    # (NOTE(review): eval is unsafe with untrusted configuration)
    enabled = eval(logconf['create'])
    if enabled:
        return os.path.join(opt.pathof(cf, 'logpath'),
                            template.replace('#TODAY#', today))
    return False
def download_fast(self, date, FA='af', del1=True, checkinv=False,
                  quiet=True, prevopt=True):
    '''
    Downloads all the self.egrep variables for date.
    By default both analysis and forecast data is downloaded.
    Used by download_current and download_range (use them instead)

    Inputs:
      date,
      FA ('f'orecast or 'a'nalysis or 'af')
      del1, if the original version is grib1, the conversion to grib2
            is done and the version 1 files are removed if del1 is True
      checkinv, will check if inv file exists... increases speed for
            current day downloads!!
      prevopt, check for previous options file if the "best one" is
            not present
    '''
    # daily files:
    targets, destinations = self.daily_files(date, FA)

    # start download log:
    log = os.path.join(self.logFolder, 'download.log')
    if not os.path.isdir(self.logFolder):
        os.makedirs(self.logFolder)
    # bug fix: open(...).write(...) leaked the file handle; use `with`
    with open(log, 'a') as flog:
        flog.write('::::' + dateu.currday(
            local=True).strftime("%b %d %Y %H:%M:%S") + '\n')

    for i in range(len(targets)):
        err, dest2best = self.__download_fast_once(targets[i], checkinv,
                                                   log, del1, quiet)

        # if download fails, check previous options (up to self.attmax
        # attempts) while the best file is not present:
        nAttempts = 0
        Target = targets[i]
        while err and nAttempts <= self.attmax and prevopt:
            nAttempts += 1
            prevtarget = self.prev_option(Target)
            Target = prevtarget
            err, dest2 = self.__download_fast_once(prevtarget, False,
                                                   log, del1, quiet)
            if not err:
                # link the fallback file to the expected "best" path:
                with open(log, 'a') as flog:
                    flog.write(':: Linking ' + dest2 + '\n')
                os.symlink(os.path.realpath(dest2),
                           os.path.realpath(dest2best))
                if not quiet:
                    print('linking ', dest2, dest2best)
def gen_mdates(cf, prev_month=-1):
    '''Generate (first_day, last_day) date pairs, one per month, from the
    configured start date up to `prev_month` months before the current
    month.

    Returns a list of (date1, date2) tuples.
    '''
    start_date, end_date = opt.dates_info(cf)
    # unused day components replaced by _ (were d1 and d):
    y1, m1, _ = dateu.parse_date(start_date)
    y, m, _ = dateu.parse_date(dateu.currday())
    y2, m2 = dateu.next_month(y, m, n=prev_month)
    # month start dates; consecutive entries delimit each month:
    dates = dateu.mrange(y1, m1, y2, m2)
    out = []
    for i in range(len(dates) - 1):
        date1 = dates[i]
        # last day of the month = day before the next month's start:
        date2 = dateu.next_date(dates[i + 1], -1)
        out += [(date1, date2)]
    return out
def gen_mdates(cf, prev_month=-1):
    '''Build month (start, end) date pairs from the configured start date
    until prev_month months before today.'''
    start_date, end_date = opt.dates_info(cf)
    y1, m1, d1 = dateu.parse_date(start_date)
    y, m, d = dateu.parse_date(dateu.currday())
    y2, m2 = dateu.next_month(y, m, n=prev_month)
    dates = dateu.mrange(y1, m1, y2, m2)
    # pair each month start with the day before the next month's start:
    return [(first, dateu.next_date(nxt, -1))
            for first, nxt in zip(dates[:-1], dates[1:])]
def download_fast(self, date, FA='af', del1=True, checkinv=False,
                  quiet=True, prevopt=True):
    '''
    Downloads all the self.egrep variables for date.
    By default both analysis and forecast data is downloaded.
    Used by download_current and download_range (use them instead)

    Inputs:
      date,
      FA ('f'orecast or 'a'nalysis or 'af')
      del1, if the original version is grib1, the conversion to grib2
            is done and the version 1 files are removed if del1 is True
      checkinv, will check if inv file exists... increases speed for
            current day downloads!!
      prevopt, check for previous options file if the "best one" is
            not present
    '''
    # daily files:
    targets, destinations = self.daily_files(date, FA)

    # start download log:
    log = os.path.join(self.logFolder, 'download.log')
    if not os.path.isdir(self.logFolder):
        os.makedirs(self.logFolder)
    # bug fix: open(...).write(...) leaked the file handle; use `with`
    with open(log, 'a') as flog:
        flog.write('::::' + dateu.currday(
            local=True).strftime("%b %d %Y %H:%M:%S") + '\n')

    for i in range(len(targets)):
        err, dest2best = self.__download_fast_once(targets[i], checkinv,
                                                   log, del1, quiet)

        # on failure, fall back to previous option files (up to
        # self.attmax attempts) while the best file is not present:
        nAttempts = 0
        Target = targets[i]
        while err and nAttempts <= self.attmax and prevopt:
            nAttempts += 1
            prevtarget = self.prev_option(Target)
            Target = prevtarget
            err, dest2 = self.__download_fast_once(prevtarget, False,
                                                   log, del1, quiet)
            if not err:
                # link file:
                with open(log, 'a') as flog:
                    flog.write(':: Linking ' + dest2 + '\n')
                os.symlink(os.path.realpath(dest2),
                           os.path.realpath(dest2best))
                if not quiet:
                    print('linking ', dest2, dest2best)
def create(self):
    '''
    Creates model netcdf river forcing file
    '''
    nc = netcdf.Pync(self.fname, self.perm, version=self.ncversion)

    nx = netcdf.fdim(self.grid, 'xi_rho')
    ny = netcdf.fdim(self.grid, 'eta_rho')

    # Dimensions:
    nc.add_dim('s_rho', self.nz)
    nc.add_dim('river', self.nrivers)
    nc.add_dim('river_time', 0)  # 0 => unlimited record dimension

    # Variables:
    v = nc.add_var('river', np.dtype('d'), ('river',))
    v.add_att('long_name', 'river runoff identification number')

    v = nc.add_var('river_Xposition', np.dtype('d'), ('river',))
    v.add_att('long_name', 'river XI-position at RHO-points')
    v.add_att('valid_min', 1)
    v.add_att('valid_max', nx - 1)

    v = nc.add_var('river_Eposition', np.dtype('d'), ('river',))
    v.add_att('long_name', 'river ETA-position at RHO-points')
    v.add_att('valid_min', 1)
    v.add_att('valid_max', ny - 1)

    v = nc.add_var('river_direction', np.dtype('d'), ('river',))
    v.add_att('long_name', 'river runoff direction')

    v = nc.add_var('river_Vshape', np.dtype('d'), ('s_rho', 'river'))
    v.add_att('long_name', 'river runoff mass transport vertical profile')

    v = nc.add_var('river_time', np.dtype('d'), ('river_time',))
    v.add_att('long_name', 'river runoff time')
    v.add_att('units', self.tunits)
    v.add_att('add_offset', 0)

    v = nc.add_var('river_transport', np.dtype('d'), ('river_time', 'river'))
    v.add_att('long_name', 'river runoff vertically integrated mass transport')
    v.add_att('units', 'metre3 second-1')
    v.add_att('time', 'river_time')

    v = nc.add_var('river_temp', np.dtype('d'), ('river_time', 's_rho', 'river'))
    v.add_att('long_name', 'river runoff potential temperature')
    v.add_att('units', 'Celsius')
    v.add_att('time', 'river_time')

    v = nc.add_var('river_salt', np.dtype('d'), ('river_time', 's_rho', 'river'))
    v.add_att('long_name', 'river runoff salinity')
    # bug fix: units were 'Celsius' (copy-paste from river_temp);
    # salinity is expressed in practical salinity units:
    v.add_att('units', 'PSU')
    v.add_att('time', 'river_time')

    # Global Attributes:
    nc.add_att('type', self.type)
    nc.add_att('title', self.title)
    nc.add_att('grd_file', os.path.realpath(self.grid))
    nc.add_att('date', dts.currday().isoformat(' '))
    nc.add_att('author', cb.username()[1] + ', ' + cb.username()[0] + '@' + cb.machinename())

    # extra attrs:
    for i in self.attr.keys():
        nc.add_att(i, self.attr[i])

    nc.close()
def check_atm(date, FA, wait=3600, cf=CONF):
    '''Check (and wait for) the atmospheric forcing data (wrf or gfs)
    needed for date's 'a'nalysis or 'f'orecast run.

    Polls every `wait` seconds until the data is ready or until 1.5 days
    have passed since `date`. Returns True when ready, otherwise the last
    is_ready result.
    '''
    atmPath = opt.pathof(cf, 'external', 'atm')
    atmData = opt.atm_info(cf)['data']
    npred = opt.n_pred(cf)

    # robustness fix: ir was unbound (NameError below) when atmData was
    # neither 'wrf' nor 'gfs'; default to not-ready instead:
    ir = False
    if atmData == 'wrf':
        from ompy.roms import atmblk_forcing as bfrc
        ir = bfrc.is_ready_wrf(date, FA, wrfpath=atmPath)
    elif atmData == 'gfs':
        from okean.datasets.gfs import is_ready as is_ready_gfs
        ir = is_ready_gfs(atmPath, date, FA, npred)

    if ir is True:
        print('atm ok at check')
        return True

    now = dateu.currday()
    tdiff = now - date
    tdiff = tdiff.days + tdiff.seconds / 86400.
    print('waiting for atm: %s' % FA)

    if FA == 'a':
        # wait till 12am today to start creating analysis of prev day!
        # -> tdiff=1.5 with all atm data ok.
        # if time higher, the available possible data is used.
        while tdiff < 1.5:
            time.sleep(wait)
            sys.stdout.write('.')
            now = dateu.currday()
            tdiff = now - date
            tdiff = tdiff.days + tdiff.seconds / 86400.
            if atmData == 'wrf':
                ir = bfrc.is_ready_wrf(date, FA, wrfpath=atmPath)
            elif atmData == 'gfs':
                ir = is_ready_gfs(atmPath, date, FA, npred)
            print(" atm data ready = %s : %s tdiff %6.2f" % (ir, now.isoformat(' '), tdiff))
            if ir is True:
                return True
    elif FA == 'f':
        # if atm data not present after tdiff, forget current forecast
        while tdiff < 1.5:
            time.sleep(wait)
            sys.stdout.write('.')
            now = dateu.currday()
            tdiff = now - date
            tdiff = tdiff.days + tdiff.seconds / 86400.
            if atmData == 'wrf':
                ir = bfrc.is_ready_wrf(date, FA, wrfpath=atmPath)
            elif atmData == 'gfs':
                ir = is_ready_gfs(atmPath, date, FA, npred)
            print(' atm data ready = ', ir, ' at ', now, tdiff)
            if ir is True:
                return True

    return ir
def last_week(cf):
    '''Stub: parse today's date components.

    NOTE(review): this function appears incomplete in the source (no
    return statement and cf is unused) — presumably meant to compute the
    previous week's date range; confirm against callers.
    '''
    # bug fix: 'y.m,d = ...' was a typo (NameError on y at runtime);
    # unpack the (year, month, day) triple properly:
    y, m, d = dateu.parse_date(dateu.currday())