def __init__(self, Ti, ARCHIVE_DIR, search_type, postfix_dir=""):
    '''
    Reader for the files produced by SatValidation in the operational
    chain (or similar sequential runs).

    Arguments:
    * Ti          * a TimeInterval
    * ARCHIVE_DIR * string indicating path of the chain archive directory
                    e.g. /pico/home/usera07ogs/a07ogs00/OPA/V2C/archive/
    * search_type * string, having one of these values: f0, f1, f2, a0, a1, a2
    * postfix_dir * string to pass to TimeSeries objects
    '''
    self.timelist = []
    self.filelist = []
    # Statistics holders; filled later by readfiles().
    self.bias = None
    self.number = None
    self.rmse = None
    self.model = None
    self.sat = None
    self.rmselog = None
    self.biaslog = None
    self.search_type = search_type

    # The TimeSeries object is only used to enumerate the run directories;
    # the actual file lookup below uses the requested search_type.
    TS = TimeSeries(Ti, archive_dir=ARCHIVE_DIR,
                    postfix_dir=postfix_dir,
                    glob_pattern="Validation_f0")
    for _, rundir in TS.get_runs([2]):
        matches = glob.glob(rundir + "Validation_" + search_type + "*")
        if matches:
            # Keep only the first match of each run directory.
            self.filelist.append(matches[0])

    self.nFrames = len(self.filelist)
    self.read_basic_info(self.filelist[0])
    self.readfiles()
def __init__(self, Ti, ARCHIVE_DIR, postfix_dir=""):
    '''
    Collects the BioFloat_Weekly_validation files found in the chain
    archive run directories within the time interval Ti.

    Arguments:
    * Ti          * a TimeInterval
    * ARCHIVE_DIR * string, path of the chain archive directory
    * postfix_dir * string passed to the TimeSeries object
    '''
    self.timelist = []
    self.filelist = []
    # Statistics holders; filled later by readfiles().
    self.bias = None
    self.number = None
    self.rmse = None

    TS = TimeSeries(Ti, archive_dir=ARCHIVE_DIR,
                    postfix_dir=postfix_dir,
                    glob_pattern="BioFloat_Weekly_validation")
    for _, rundir in TS.get_runs([2]):
        found = glob.glob(rundir + "BioFloat_Weekly_validation*")
        if found:
            # Keep only the first match of each run directory.
            self.filelist.append(found[0])

    self.nFrames = len(self.filelist)
    self.read_basic_info(self.filelist[0])
    self.readfiles()
# Script: builds the per-day misfit file names for the online satellite
# validation, walking the forecast days of the archive TimeSeries.
# NOTE(review): TimeInterval, Mask, TimeSeries and Layer are presumably
# imported elsewhere in this file — confirm against the full source.
import numpy as np
import os
from commons import netcdf3

starttime = '20160301'
end__time = '20160308'
INPUTDIR = '/gpfs/work/IscrC_MYMEDBIO/COPERNICUS/online_validation_data/TMP/'  # args.inputdir
OUTDIR = '/gpfs/work/IscrC_MYMEDBIO/COPERNICUS/online_validation_data/TMP/'  # args.outdir
maskfile = '/pico/home/usera07ogs/a07ogs00/OPA/V2C/etc/static-data/MED1672_cut/MASK/meshmask.nc'  # args.maskfile

TI = TimeInterval(starttime, end__time, '%Y%m%d')
archive_dir = '/pico/home/usera07ogs/a07ogs00/OPA/V4/archive'
TheMask = Mask(maskfile)

TS = TimeSeries(TI, archive_dir, postfix_dir='POSTPROC/AVE_FREQ_1/', glob_pattern="ave*gz")
forecasts = TS.get_forecast_days(rundays=[2])
forecasts_sublist = TS.get_sublist(forecasts, [2, 3, 4])  # forecast tuesday and wed,thu

sat_archive = "/gss/gss_work/DRES_OGS_BiGe/Observations/TIME_RAW_DATA/ONLINE/SAT/MODIS/DAILY/CHECKED/"
DAILY_SAT_LIST = TS.get_daily_sat(forecasts_sublist, sat_archive)
# float aggregator already done by others

day = 0
surf_layer = Layer(0, 10)
for time, archived_file, satfile in DAILY_SAT_LIST:
    # Strip the trailing ".gz" from the archived name to get the local ave file.
    avefile = INPUTDIR + os.path.basename(archived_file)[:-3]
    day = day + 1
    outfile = OUTDIR + "misfit+%dh.nc" % (day * 24)
    # Fixed: was a Python-2 `print avefile` statement; the call form is
    # valid in both Python 2 and 3 for a single argument.
    print(avefile)
    # Removed a dead `continue` that was the last statement of the loop body.
# Script: unpacks the biogeochemical and physical archives for either the
# analysis or the forecast branch, depending on args.type.
args = argument()

from commons.timeseries import TimeSeries
from commons.time_interval import TimeInterval
from commons.utils import addsep

starttime = args.starttime
end__time = args.endtime
LOC = addsep(args.outdir)
archive_dir = args.arcdir
TI = TimeInterval(starttime, end__time, '%Y%m%d')

if args.type == 'analysis':
    T_bio = TimeSeries(TI, archive_dir, postfix_dir='POSTPROC/AVE_FREQ_1/ARCHIVE/', glob_pattern="ave*gz")
    T_phys = TimeSeries(TI, archive_dir, postfix_dir='OPAOPER_A/', glob_pattern="*gz")
    T_bio.extract_analysis(LOC + 'output_bio/')
    T_phys.extract_analysis(LOC + 'output_phys/')

if args.type == 'forecast':
    T_bio = TimeSeries(TI, archive_dir, postfix_dir='POSTPROC/AVE_FREQ_1/ARCHIVE/', glob_pattern="ave*gz")
    T_phys_s = TimeSeries(TI, archive_dir, postfix_dir='OPAOPER_A/', glob_pattern="*gz")
    # NOTE(review): T_phys_f is built but not used in the lines visible
    # here — presumably extracted further below; verify in the full file.
    T_phys_f = TimeSeries(TI, archive_dir, postfix_dir='OPAOPER_F/', glob_pattern="*gz")
    T_bio.extract_simulation(LOC + 'output_bio/')
    T_phys_s.extract_simulation(LOC + 'output_phys/')
    T_bio.extract_forecast(LOC + 'output_bio/')