# Surface chlorophyll misfit (satellite minus model) for each forecast day of the run
forecasts = TS.get_forecast_days(rundays=[2])
forecasts_sublist = TS.get_sublist(forecasts, [2, 3, 4])  # forecast Tuesday and Wed, Thu
sat_archive = "/gss/gss_work/DRES_OGS_BiGe/Observations/TIME_RAW_DATA/ONLINE/SAT/MODIS/DAILY/CHECKED/"
DAILY_SAT_LIST = TS.get_daily_sat(forecasts_sublist, sat_archive)  # float aggregator already done by others

day = 0
surf_layer = Layer(0, 10)
for time, archived_file, satfile in DAILY_SAT_LIST:
    avefile = INPUTDIR + os.path.basename(archived_file)[:-3]
    day = day + 1
    outfile = OUTDIR + "misfit+%dh.nc" % (day * 24)
    print(avefile)
    Sat16 = Sat.convertinV4format(Sat.readfromfile(satfile))
    De = DataExtractor(TheMask, filename=avefile, varname='P_i')
    Model = MapBuilder.get_layer_average(De, surf_layer)
    Misfit = Sat16 - Model
    cloudsLand = np.isnan(Sat16)  # | (Sat16 > 1.e19) | (Sat16 < 0)
    modelLand = np.isnan(Model)   # lands are nan
    nodata = cloudsLand | modelLand
    selection = ~nodata  # & TheMask.mask_at_level(200.0)
    Misfit[nodata] = np.nan
    netcdf3.write_2d_file(Misfit, 'chl_misfit', outfile, TheMask)
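# --- Hedged sketch (not part of the operational script above): the misfit step
# reduces to plain numpy once the satellite and model surface chlorophyll are
# 2D arrays on the same grid; 'sat2d' and 'model2d' are hypothetical inputs.
import numpy as np

def surface_misfit(sat2d, model2d):
    """Return sat - model, with NaN wherever either field has no data."""
    misfit = sat2d - model2d
    nodata = np.isnan(sat2d) | np.isnan(model2d)   # clouds or land on either side
    misfit[nodata] = np.nan
    return misfit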
Time__end = "20160101"
TI = TimeInterval(Timestart, Time__end, "%Y%m%d")
TLCheck = TimeList.fromfilenames(TI, CHECKDIR, "*.nc", prefix='', dateformat='%Y%m%d')
IonamesFile = '../../postproc/IOnames_sat.xml'
IOname = IOnames.IOnames(IonamesFile)
MONTHLY_reqs = TLCheck.getMonthlist()

jpi = Sat.NativeMesh.jpi
jpj = Sat.NativeMesh.jpj

for req in MONTHLY_reqs:
    outfile = req.string + IOname.Output.suffix + ".nc"
    outpathfile = MONTHLYDIR + outfile
    conditionToSkip = os.path.exists(outpathfile) and (not reset)
    if conditionToSkip:
        continue
    print(outfile)
    ii, w = TLCheck.select(req)
    nFiles = len(ii)
    M = np.zeros((nFiles, jpj, jpi), np.float32)
    for iFrame, j in enumerate(ii):
        inputfile = TLCheck.filelist[j]
        CHL = Sat.readfromfile(inputfile)
        M[iFrame, :, :] = CHL
    CHL_OUT = Sat.logAverager(M)
    Sat.dumpV4file(outpathfile, CHL_OUT)
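# --- Hedged sketch: Sat.logAverager is assumed to average chlorophyll in log
# space over the time axis, skipping fill values; a minimal numpy version of
# that idea (the real routine may handle fill values differently).
import numpy as np

def log_average(M, fillValue=-999.0):
    """M: (nFrames, jpj, jpi). Geometric mean over valid frames, fillValue elsewhere."""
    valid = M != fillValue
    logsum = np.log(np.where(valid, M, 1.0)).sum(axis=0)   # log(1) = 0 for invalid pixels
    counts = valid.sum(axis=0)
    out = np.full(M.shape[1:], fillValue, dtype=np.float32)
    ok = counts > 0
    out[ok] = np.exp(logsum[ok] / counts[ok])
    return out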
TI = TimeInterval(Timestart, Time__end, "%Y%m%d")
TLCheck = TimeList.fromfilenames(TI, CHECKDIR, "*.nc", prefix='', dateformat='%Y%m%d')
#IonamesFile = '../../../postproc/IOnames_sat.cci.xml'
#IOname = IOnames.IOnames(IonamesFile)
suffix = os.path.basename(TLCheck.filelist[0])[8:]
WEEK_reqs = TLCheck.getWeeklyList(2)

jpi = Sat.masks.KD490mesh.jpi
jpj = Sat.masks.KD490mesh.jpj

for req in WEEK_reqs:
    outfile = req.string + suffix
    outpathfile = WEEKLYDIR + outfile
    conditionToSkip = os.path.exists(outpathfile) and (not reset)
    if conditionToSkip:
        continue
    print(outfile)
    ii, w = TLCheck.select(req)
    nFiles = len(ii)
    M = np.zeros((nFiles, jpj, jpi), np.float32)
    for iFrame, j in enumerate(ii):
        inputfile = TLCheck.filelist[j]
        Kext = Sat.readfromfile(inputfile, 'KD490')
        M[iFrame, :, :] = Kext
    Kext_OUT = Sat.averager(M)
    Sat.dump_KD490_nativefile(outpathfile, Kext_OUT)
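# --- Hedged sketch: Sat.averager is assumed to be an arithmetic mean over the
# time axis that ignores fill values; a standalone numpy equivalent for
# illustration only.
import numpy as np

def time_average(M, fillValue=-999.0):
    """M: (nFrames, jpj, jpi). Per-pixel mean of valid frames, fillValue elsewhere."""
    valid = M != fillValue
    counts = valid.sum(axis=0)
    sums = np.where(valid, M, 0.0).sum(axis=0)
    out = np.full(M.shape[1:], fillValue, dtype=np.float32)
    ok = counts > 0
    out[ok] = sums[ok] / counts[ok]
    return out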
if somecheck:
    MEAN, STD = Sat.readClimatology(CLIM_FILE)
else:
    print("All checks done")

for iTime, filename in enumerate(TL_orig.filelist):
    outfile = CHECKDIR + os.path.basename(filename)
    exit_condition = os.path.exists(outfile) and (not reset)
    if exit_condition:
        continue
    julian = int(TL_orig.Timelist[iTime].strftime("%j"))
    if julian == 366:
        julian = 365
    DAILY_REF_MEAN = MEAN[julian-1, :, :]
    DAILY_REF_STD = STD[julian-1, :, :]
    CHL_IN = Sat.readfromfile(filename, 'KD490')
    #CHL_IN[581:, 164:] = Sat.fillValue  # BLACK SEA
    cloudsLandTIME = CHL_IN == Sat.fillValue
    cloudlandsCLIM = DAILY_REF_MEAN == Sat.fillValue
    CHL_OUT = CHL_IN.copy()
    CHL_OUT[cloudsLandTIME] = Sat.fillValue
    CHL_OUT[cloudlandsCLIM] = Sat.fillValue
    counter_refNAN = (~cloudsLandTIME & cloudlandsCLIM).sum(axis=None)
    outOfRange = np.abs(CHL_IN - DAILY_REF_MEAN) > DAILY_REF_STD * 2.0
    outOfRange[cloudsLandTIME | cloudlandsCLIM] = False
    counter_elim = outOfRange.sum(axis=None)
    CHL_OUT[outOfRange] = Sat.fillValue
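# --- Hedged sketch: the check above rejects pixels that deviate from the daily
# climatology by more than two standard deviations; the same logic as a
# standalone function (names are illustrative, not from the original script).
import numpy as np

def climatology_check(field, clim_mean, clim_std, fillValue=-999.0, nsigma=2.0):
    """Return the checked field and the number of rejected pixels."""
    novalue = (field == fillValue) | (clim_mean == fillValue)
    out_of_range = np.abs(field - clim_mean) > nsigma * clim_std
    out_of_range &= ~novalue
    checked = field.copy()
    checked[novalue | out_of_range] = fillValue
    return checked, int(out_of_range.sum())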
TI = TimeInterval(Timestart, Time__end, "%Y%m%d")
TL = TimeList.fromfilenames(TI, INPUTDIR, "*.nc", prefix="", dateformat="%Y%m%d")
# MY_YEAR = TimeInterval('20130101','20140101',"%Y%m%d")
req_label = Timestart[0:4]  # 'Ave:2013'
req = requestors.Generic_req(TI)
indexes, weights = TL.select(req)
nFrames = len(indexes)

SAT_3D = np.zeros((nFrames, jpj, jpi), np.float32)
for iFrame, k in enumerate(indexes):
    t = TL.Timelist[k]
    inputfile = INPUTDIR + t.strftime("%Y%m%d") + "_d-OC_CNR-L4-CHL-MedOC4_SAM_7KM-MED-REP-v02.nc"
    CHL = Sat.readfromfile(inputfile, "lchlm")
    SAT_3D[iFrame, :, :] = CHL

Sat2d = Sat.averager(SAT_3D)
masknan = TheMask.mask_at_level(0)
Sat2d[~masknan] = np.nan

var = "SATchl"
layer = Layer(0, 10)
fig, ax = mapplot({"varname": var, "clim": [0, 0.4], "layer": layer, "data": Sat2d, "date": "annual"},
                  fig=None, ax=None, mask=TheMask,
                  coastline_lon=clon, coastline_lat=clat)
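# --- Hedged sketch: if the mapplot helper is not available, the same annual map
# can be drawn with plain matplotlib; Sat2d, TheMask and req_label are assumed
# to be defined as above, and the output file name is illustrative.
import matplotlib.pyplot as plt

x = TheMask.xlevels[0, :]
y = TheMask.ylevels[:, 0]
fig2, ax2 = plt.subplots()
im = ax2.imshow(Sat2d, origin='lower', vmin=0, vmax=0.4,
                extent=[x.min(), x.max(), y.min(), y.max()], aspect='auto')
fig2.colorbar(im, ax=ax2, label='chl [mg/m3]')
ax2.set_xlabel('Lon')
ax2.set_ylabel('Lat')
fig2.savefig(req_label + '_SATchl.png', dpi=150)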
TI = TimeInterval('20010101', '20141230', "%Y%m%d")  # VALID FOR REANALYSIS RUN
TL = TimeList.fromfilenames(TI, INPUTDIR, "*.nc", prefix="", dateformat="%Y%m")
MY_YEAR = TimeInterval('20010101', '20141230', "%Y%m%d")
req_label = 'Ave:2001-2014'
req = requestors.Generic_req(MY_YEAR)
indexes, weights = TL.select(req)
nFrames = len(indexes)

SAT_3D = np.zeros((nFrames, jpj, jpi), np.float32)
for iFrame, k in enumerate(indexes):
    t = TL.Timelist[k]
    inputfile = INPUTDIR + t.strftime("%Y%m") + "_d-OC_CNR-L4-CHL-MedOC4_SAM_7KM-MED-REP-v02.nc"
    CHL = Sat.readfromfile(inputfile, 'lchlm')
    SAT_3D[iFrame, :, :] = CHL

Sat2d = Sat.averager(SAT_3D)
mask = TheMask.mask_at_level(0)
Sat2d[~mask] = np.nan

var = 'SATchl'
#layer = Layer(0, 10)
#fig, ax = mapplot({'varname': var, 'clim': [0, 0.4], 'layer': layer, 'data': Sat2d, 'date': 'annual'},
#                  fig=None, ax=None, mask=TheMask, coastline_lon=clon, coastline_lat=clat)
fig, ax = mapplot({'clim': [0, 0.4], 'data': Sat2d},
                  fig=None, ax=None, mask=TheMask, coastline_lon=clon, coastline_lat=clat)
ax.set_xlim([-5, 36])
ax.set_ylim([30, 46])
ax.set_xlabel('Lon').set_fontsize(12)
ax.set_ylabel('Lat').set_fontsize(12)
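# --- Hedged sketch: if the 2001-2014 mean map needs to be archived and not only
# plotted, it can be written with the standard netCDF4 package; file name,
# variable name, dimension names and fill value here are illustrative choices.
from netCDF4 import Dataset
import numpy as np

def dump_2d_map(filename, data, varname='SATchl', fillValue=1.0e20):
    jpj_out, jpi_out = data.shape
    with Dataset(filename, 'w') as nc:
        nc.createDimension('lat', jpj_out)
        nc.createDimension('lon', jpi_out)
        v = nc.createVariable(varname, 'f4', ('lat', 'lon'), fill_value=fillValue)
        v[:] = np.where(np.isnan(data), fillValue, data)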
TheMask = Mask('/pico/home/usera07ogs/a07ogs00/OPA/V2C/etc/static-data/MED1672_cut/MASK/meshmask.nc')
jpk, jpj, jpi = TheMask.shape
x = TheMask.xlevels[0, :]
y = TheMask.ylevels[:, 0]
x1km = Sat.masks.KD490mesh.lon
y1km = Sat.masks.KD490mesh.lat

I_START, I_END = interp2d.array_of_indices_for_slicing(x, x1km)
J_START, J_END = interp2d.array_of_indices_for_slicing(y, y1km)

INPUTDIR = "/gss/gss_work/DRES_OGS_BiGe/Observations/TIME_RAW_DATA/STATIC/SAT/KD490/MONTHLY/ORIGMESH/"
OUTPUTDIR = "/gss/gss_work/DRES_OGS_BiGe/Observations/TIME_RAW_DATA/STATIC/SAT/KD490/MONTHLY/V4/"
dateformat = "%Y%m"
reset = False
Timestart = "19990102"
Time__end = "20160101"

TI = TimeInterval(Timestart, Time__end, "%Y%m%d")
TL = TimeList.fromfilenames(TI, INPUTDIR, "*.nc", prefix='', dateformat=dateformat)

for filename in TL.filelist:
    outfile = OUTPUTDIR + os.path.basename(filename)
    exit_condition = os.path.exists(outfile) and (not reset)
    if exit_condition:
        continue
    Mfine = Sat.readfromfile(filename, 'KD490')
    M16 = interp2d.interp_2d_by_cells_slices(Mfine, TheMask, I_START, I_END, J_START, J_END)
    Sat.dump_simple_V4file(outfile, M16, 'KD490')
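# --- Hedged sketch: interp_2d_by_cells_slices is assumed to average all fine
# 1 km pixels falling inside each coarse model cell, using the precomputed
# slice bounds; a minimal (slow but explicit) numpy version of that idea.
import numpy as np

def coarsen_by_slices(Mfine, I_START, I_END, J_START, J_END, fillValue=-999.0):
    jpj_c, jpi_c = len(J_START), len(I_START)
    out = np.full((jpj_c, jpi_c), fillValue, dtype=np.float32)
    for j in range(jpj_c):
        for i in range(jpi_c):
            block = Mfine[J_START[j]:J_END[j], I_START[i]:I_END[i]]
            good = block[block != fillValue]
            if good.size > 0:
                out[j, i] = good.mean()
    return out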
if somecheck:
    MEAN, STD = Sat.readClimatology(CLIM_FILE)
else:
    print("All checks done")

for iTime, filename in enumerate(TL_orig.filelist):
    outfile = CHECKDIR + os.path.basename(filename)
    exit_condition = os.path.exists(outfile) and (not reset)
    if exit_condition:
        continue
    julian = int(TL_orig.Timelist[iTime].strftime("%j"))
    DAILY_REF_MEAN = MEAN[julian-1, :, :]
    DAILY_REF_STD = STD[julian-1, :, :]
    CHL_IN = Sat.readfromfile(filename)
    CHL_IN[581:, 164:] = Sat.fillValue  # BLACK SEA
    cloudsLandTIME = CHL_IN == Sat.fillValue
    cloudlandsCLIM = DAILY_REF_MEAN == Sat.fillValue
    CHL_OUT = CHL_IN.copy()
    CHL_OUT[cloudsLandTIME] = Sat.fillValue
    CHL_OUT[cloudlandsCLIM] = Sat.fillValue
    counter_refNAN = (~cloudsLandTIME & cloudlandsCLIM).sum(axis=None)
    outOfRange = np.abs(CHL_IN - DAILY_REF_MEAN) > DAILY_REF_STD * 2.0
    outOfRange[cloudsLandTIME | cloudlandsCLIM] = False
    counter_elim = outOfRange.sum(axis=None)
    CHL_OUT[outOfRange] = Sat.fillValue
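# --- Hedged sketch: the Black Sea exclusion above is a plain index-slice
# assignment; the same pattern blanks any rectangular block in (row, column)
# space (the indices and function name below are illustrative).
import numpy as np

def blank_region(field, j_start, i_start, fillValue=-999.0):
    """Set every pixel with row >= j_start and column >= i_start to fillValue."""
    out = field.copy()
    out[j_start:, i_start:] = fillValue
    return out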
for itime, modeltime in enumerate(model_TL.Timelist):
    print(modeltime)
    CoupledList = sat_TL.couple_with([modeltime])
    sattime = CoupledList[0][0]
    satfile = REF_DIR + sattime.strftime(IOname.Input.dateformat) + IOname.Output.suffix + ".nc"
    modfile = model_TL.filelist[itime]

    De = DataExtractor(TheMask, filename=modfile, varname='P_i')
    Model = MapBuilder.get_layer_average(De, surf_layer)
    #ncIN = NC.netcdf_file(modfile, 'r')
    #Model = ncIN.variables['P_i'].data[0, 0, :, :].copy()  # .astype(np.float64)
    #Model = ncIN.variables['lchlm'].data.copy()
    #ncIN.close()

    Sat16 = Sat.readfromfile(satfile, var='lchlm')  # .astype(np.float64)

    cloudsLand = np.isnan(Sat16) | (Sat16 > 1.e19) | (Sat16 < 0)
    modelLand = np.isnan(Model)  # lands are nan
    nodata = cloudsLand | modelLand
    selection = ~nodata & masksel_2D
    M = matchup.matchup(Model[selection], Sat16[selection])

    for isub, sub in enumerate(OGS.P):
        selection = SUB[sub.name] & (~nodata) & masksel_2D
        M = matchup.matchup(Model[selection], Sat16[selection])
        BGC_CLASS4_CHL_RMS_SURF_BASIN[itime, isub] = M.RMSE()
        BGC_CLASS4_CHL_BIAS_SURF_BASIN[itime, isub] = M.bias()
        weight = TheMask.area[selection]
        MODEL_MEAN[itime, isub] = weighted_mean(M.Model, weight)
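# --- Hedged sketch: the statistics filled in above reduce to a few numpy
# one-liners; this mirrors what matchup.RMSE(), matchup.bias() and
# weighted_mean() are assumed to compute (sign conventions may differ).
import numpy as np

def rmse(model, ref):
    return np.sqrt(np.mean((model - ref) ** 2))

def bias(model, ref):
    return np.mean(model - ref)

def area_weighted_mean(values, weights):
    return np.sum(values * weights) / np.sum(weights)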
COASTNESS_LIST = ['coast', 'open_sea', 'everywhere']
nCOAST = len(COASTNESS_LIST)
dtype = [(coast, bool) for coast in COASTNESS_LIST]
COASTNESS = np.ones((jpj, jpi), dtype=dtype)
COASTNESS['coast'] = ~mask200_2D
COASTNESS['open_sea'] = mask200_2D
#COASTNESS['everywhere'] = True

satfile = glob.glob(REF_DIR + date + "*")[0]
modfile = MODELDIR + "ave." + date + "-12:00:00.nc"
Model = netcdf3.read_3d_file(modfile, 'P_l')[0, :, :]
try:
    Sat16 = Sat.readfromfile(satfile, 'lchlm')  # weekly
except:
    Sat16 = Sat.convertinV4format(Sat.readfromfile(satfile, 'CHL'))  # daily

cloudsLand = np.isnan(Sat16)
Sat16[cloudsLand] = -999.0
cloudsLand = Sat16 == -999.0
modelLand = Model > 1.0e+19
nodata = cloudsLand | modelLand

BGC_CLASS4_CHL_RMS_SURF_BASIN = np.zeros((nSUB, nCOAST), np.float32)
BGC_CLASS4_CHL_BIAS_SURF_BASIN = np.zeros((nSUB, nCOAST), np.float32)
VALID_POINTS = np.zeros((nSUB, nCOAST), np.float32)
MODEL_MEAN = np.zeros((nSUB, nCOAST), np.float32)
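# --- Hedged sketch: the structured array above just bundles several boolean
# masks under named fields; a tiny self-contained example of the same pattern
# (the 4x5 demo mask is made up for illustration).
import numpy as np

mask200_demo = np.zeros((4, 5), dtype=bool)   # pretend "deeper than 200 m" mask
mask200_demo[:, 2:] = True

names = ['coast', 'open_sea', 'everywhere']
coastness = np.ones(mask200_demo.shape, dtype=[(n, bool) for n in names])
coastness['coast'] = ~mask200_demo
coastness['open_sea'] = mask200_demo     # 'everywhere' stays all True

for n in names:
    print(n, int(coastness[n].sum()))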