Example no. 1
 def __init__(self, startStr =  '201706272200', endStr = '201706280400', upscaleKm = 1, min_rainrate = 0.01):
     """Store the observation period and thresholds, then load the data.

     Parameters
     ----------
     startStr, endStr : str
         Period bounds as 'YYYYMMDDHHMM' timestrings (parsed by
         ti.timestring2datetime).
     upscaleKm : int
         Spatial upscaling factor in km (stored only; usage not visible here).
     min_rainrate : float
         Minimum rain rate (stored only; usage not visible here).
     """
     # Parse the timestrings into datetime objects.
     startTime = ti.timestring2datetime(startStr)
     endTime = ti.timestring2datetime(endStr)

     self.startStr = startStr
     self.endStr = endStr
     self.startTime = startTime
     self.endTime = endTime
     self.upscaleKm = upscaleKm
     self.min_rainrate = min_rainrate
     # Internal cache for loaded data products.
     self._DATA = {}

     # Immediately load observations for the requested period.
     self.load_observation()
def produce_forecast(timeStartStr,leadTimeMin):
    """Produce/retrieve forecast products for a start time and lead time.

    Parameters
    ----------
    timeStartStr : str
        Forecast start time as a timestring (parsed by ti.timestring2datetime).
    leadTimeMin : int
        Forecast lead time in minutes.

    NOTE(review): only the branches enabled by the hard-coded flags below run,
    and the function has no return statement in the visible code.
    """
    # Hard-coded switches selecting which products to produce.
    produce_radar_observations = False
    produce_radar_extrapolation = False
    produce_cosmo_1 = True
    
    # parameters
    rainThreshold = 0.08  # mm/h; values at or below are treated as no rain

    ## RADAR OBSERVATIONS
    if produce_radar_observations:
        print('Retrieve radar observations for ' + timeStartStr + ' + ' + str(leadTimeMin) + ' min')
        # start 5 min earlier to compute 10min accumulation at t0
        startTimeToPass = ti.timestring2datetime(timeStartStr) - datetime.timedelta(minutes=5)
        startTimeToPassStr = ti.datetime2timestring(startTimeToPass)
        # NOTE(review): get_radar_observations elsewhere in this file returns
        # 4 values; this 2-value unpack may raise ValueError — confirm against nw.
        radar_observations_5min, r = nw.get_radar_observations(startTimeToPassStr, leadTimeMin+5, product='RZC')
        # aggregate to 10-min forecast (to match COSMO1 resolution)
        radar_observations_10min = nw.aggregate_in_time(radar_observations_5min,timeAccumMin=10,type='sum')
        
    ## EXTRAPOLATION FORECAST
    if produce_radar_extrapolation:
        print('Run radar extrapolation for ' + timeStartStr + ' + ' + str(leadTimeMin) + ' min')
        # produce 5-min radar extrapolation
        radar_extrapolation_5min, radar_mask_5min = nw.radar_extrapolation(timeStartStr,leadTimeMin, product='RZC')
        # aggregate to 10-min forecast (to match COSMO1 resolution)
        radar_extrapolation_10min = nw.aggregate_in_time(radar_extrapolation_5min,timeAccumMin=10,type='sum')
        radar_mask_10min = nw.aggregate_in_time(radar_mask_5min,timeAccumMin=10,type='mean')
        # add observation at t0
        # NOTE(review): radar_observations_10min is defined only when
        # produce_radar_observations is True — NameError otherwise.
        radar_extrapolation_10min = np.concatenate((radar_observations_10min[:,:,0,np.newaxis],radar_extrapolation_10min),axis=2)

    ## COSMO-1 FORECASTS
    if produce_cosmo_1:
        print('Retrive COSMO-1 for ' + timeStartStr + ' + ' + str(leadTimeMin) + ' min')
        cosmo1_10min = nw.get_cosmo1(timeStartStr, leadTimeMin)
        # Mask out non-rainy values so downstream statistics ignore them.
        cosmo1_10min[cosmo1_10min <= rainThreshold] = np.nan
Example no. 3
def get_radar_observations(timeStartStr, leadTimeMin, domainSize = 512, product = 'RZC', rainThreshold = 0.08):
    """Retrieve a stack of consecutive 5-min radar observations.

    Parameters
    ----------
    timeStartStr : str
        Start time as a timestring (parsed by ti.timestring2datetime).
    leadTimeMin : int
        Length of the period in minutes.
    domainSize : int
        Side of the square domain in pixels.
    product : str
        Radar product name.
    rainThreshold : float
        Minimum rain rate passed to the image reader.

    Returns
    -------
    radar_observations : np.ndarray [y, x, time]
        Rain-rate frames; all-NaN frames mark missing images.
    radar_mask : np.ndarray [y, x, time]
        Radar mask; all-ones for missing images.
    timestamps : list of datetime
    robject : first radar object of the stack (kept for its metadata)
    """
    # Get datetime format
    startTime = ti.timestring2datetime(timeStartStr)
    endTime = startTime + datetime.timedelta(minutes=leadTimeMin)

    # Number of consecutive radar images to retrieve (5-min base resolution)
    dt = 5 # minutes
    nimages = int((endTime - startTime).total_seconds()/60/dt) + 1

    # Retrieve images
    radarStack = get_n_next_radar_image(timeStartStr, nimages, domainSize, product, rainThreshold)

    # Store in Numpy arrays
    radar_observations = np.zeros((domainSize,domainSize,nimages))
    radar_mask = np.zeros((domainSize,domainSize,nimages))
    timestamps = []
    currentTime = startTime
    for t, frame in enumerate(radarStack):
        # war > -1 marks a usable frame (presumably wet-area ratio — TODO confirm)
        if frame.war > -1:
            radar_observations[:,:,t] = frame.rainrate.copy()
            radar_mask[:,:,t] = frame.mask.copy()
            timestamps.append(frame.datetime)
        else:
            # Missing frame: NaN rain rates, fully masked, nominal timestamp.
            radar_observations[:,:,t] = np.full((domainSize,domainSize), np.nan)
            radar_mask[:,:,t] = np.ones((domainSize,domainSize))
            timestamps.append(currentTime)

        currentTime = currentTime + datetime.timedelta(minutes=5)

    # Keep the first radar object for its metadata (e.g. coordinates).
    robject = radarStack[0]

    return radar_observations, radar_mask, timestamps, robject
def produce_radar_observation_with_accumulation(startValidTimeStr, endValidTimeStr, newAccumulationMin=10, domainSize=512, product='RZC',rainThreshold=0.08):
    """Retrieve radar observations re-accumulated to `newAccumulationMin` minutes.

    Parameters
    ----------
    startValidTimeStr, endValidTimeStr : str
        Validity period as timestrings (parsed by ti.timestring2datetime).
    newAccumulationMin : int
        Target accumulation period in minutes (expected multiple of 5).
    domainSize : int
        Side of the square domain in pixels.
    product : str
        Radar product name.
    rainThreshold : float
        Rain rates at or below this value [mm/h] are set to 0.

    Returns
    -------
    radar_observations_new : np.ndarray [time, y, x], rain rate in mm/h
    radar_mask_new : np.ndarray [time, y, x]; NaN marks invalid pixels
    timestamps : list of datetime (end of each accumulation window)
    """
    # base accumulation is 5 min
    baseAccumMin = 5
    accumFactor = int(newAccumulationMin/baseAccumMin)
    
    # datetime format
    startValidTime = ti.timestring2datetime(startValidTimeStr)
    endValidTime = ti.timestring2datetime(endValidTimeStr)
    leadTimeMin = int((endValidTime - startValidTime).total_seconds()/60)

    # start to compute the correct accumulation at t0
    if newAccumulationMin>baseAccumMin:
        startTimeToPass = startValidTime - datetime.timedelta(minutes=newAccumulationMin-baseAccumMin)
    else: 
        startTimeToPass = startValidTime
    startTimeToPassStr = ti.datetime2timestring(startTimeToPass)
    
    # Silence NaN-comparison warnings while reading the raw 5-min stack.
    with np.errstate(invalid='ignore'):
        radar_observations_5min, radar_mask_5min, timestamps, r = nw.get_radar_observations(startTimeToPassStr, leadTimeMin+newAccumulationMin-baseAccumMin, product=product, rainThreshold = 0)

    # convert to mm for computing accumulations
    radar_observations_5min = radar_observations_5min/60*baseAccumMin
    
    # aggregate to new accumulation
    if newAccumulationMin>baseAccumMin:
        radar_observations_new = nw.aggregate_in_time(radar_observations_5min,timeAccumMin=newAccumulationMin,type='sum')
        radar_mask_new = nw.aggregate_in_time(radar_mask_5min,timeAccumMin=newAccumulationMin,type='nansum')
        # A window is valid (1) if any 5-min mask contributed, invalid (NaN) otherwise.
        radar_mask_new[radar_mask_new==0] = np.nan
        radar_mask_new[radar_mask_new>0] = 1
        # Keep the timestamp closing each accumulation window.
        timestamps = timestamps[accumFactor-1::accumFactor]
    else:
        radar_observations_new = radar_observations_5min
        radar_mask_new = radar_mask_5min  
        
    # convert to mm/h
    radar_observations_new = radar_observations_new/newAccumulationMin*60
    
    # Apply rain threshold
    radar_observations_new[radar_observations_new<=rainThreshold] = 0
   
    # [time,y,x]
    radar_observations_new = np.rollaxis(radar_observations_new,2,0)
    radar_mask_new = np.rollaxis(radar_mask_new,2,0)
    
    return radar_observations_new,radar_mask_new,timestamps
Example no. 5
    def __init__(self,
                 startStr='201706272200',
                 endStr='201706280400',
                 upscaleKm=1,
                 min_rainrate=0.01):
        """Record the observation period and thresholds, then load the data.

        startStr/endStr are 'YYYYMMDDHHMM' timestrings; upscaleKm and
        min_rainrate are stored for later use.
        """
        self.startStr = startStr
        self.endStr = endStr
        # Parse the period bounds into datetime objects.
        self.startTime = ti.timestring2datetime(startStr)
        self.endTime = ti.timestring2datetime(endStr)
        self.upscaleKm = upscaleKm
        self.min_rainrate = min_rainrate
        # Internal cache of loaded data products.
        self._DATA = {}
        # Load observations right away.
        self.load_observation()
Example no. 6
def get_n_last_radar_image(timeStampStr, nimages, domainSize, product = 'RZC', rainThreshold = 0.08):
    """Return the `nimages` radar frames ending at `timeStampStr`, oldest first."""
    # Pick the reader matching the product's file format (full radar extent).
    read_image = io.read_bin_image if product == 'RZC' else io.read_gif_image
    current = ti.timestring2datetime(timeStampStr)
    stack = []
    for _ in range(nimages):
        currentStr = ti.datetime2timestring(current)
        frame = read_image(currentStr, fftDomainSize=domainSize, product=product, minR=rainThreshold)
        # Prepend so the stack ends up in chronological order.
        stack.insert(0, frame)
        # Step back one 5-min radar interval.
        current = current - datetime.timedelta(minutes=5)
    return stack
Example no. 7
def get_n_next_radar_image(timeStampStr, nimages, domainSize, product = 'RZC', rainThreshold = 0.08):
    """Return `nimages` consecutive radar frames starting at `timeStampStr`."""
    # Select the reader for this product's file format.
    read_image = io.read_bin_image if product == 'RZC' else io.read_gif_image
    current = ti.timestring2datetime(timeStampStr)
    stack = []
    count = 0
    while count < nimages:
        frame = read_image(ti.datetime2timestring(current), fftDomainSize=domainSize, product=product, minR=rainThreshold)
        stack.append(frame)
        # Advance one 5-min radar interval.
        current = current + datetime.timedelta(minutes=5)
        count += 1
    return stack
Example no. 8
# Unpack command-line arguments.
plotSpectrum = args.spec
extension = '.' + args.format
product = args.product

# The period must be ordered: start before end.
if (int(args.start) > int(args.end)):
    print('Time end should be after time start')
    sys.exit(1)

# Sanity-check both timestamps against a plausible range.
# (bugfix: the upper bound previously re-tested args.start instead of
# args.end, so an out-of-range end time slipped through.)
if (int(args.start) < 198001010000) or (int(args.end) > 203001010000):
    print('Invalid -start or -end time arguments.')
    sys.exit(1)
else:
    timeStartStr = args.start
    timeEndStr = args.end

### Get list of filenames
timeStart = ti.timestring2datetime(timeStartStr)
timeEnd = ti.timestring2datetime(timeEndStr)

fileNameExpr = product + '_' + plotSpectrum + 'PS_*'
fileList = io.get_files_period(timeStart,
                               timeEnd,
                               inBaseDir,
                               fileNameExpr,
                               tempResMin=5)

# Check if any files were found
if len(fileList) == 0:
    print('No files found to generate video. Check your input filestamps.')
    sys.exit()

######## Generate clip
def get_radar_extrapolation(startValidTimeStr,endValidTimeStr, newAccumulationMin=10, domainSize=512, rainThreshold=0.08, product='RZC', outBaseDir='/scratch/ned/data/'):
    """Produce (or load from cache) a deterministic radar extrapolation forecast.

    A 5-min extrapolation is computed with nw.radar_extrapolation, optionally
    aggregated to `newAccumulationMin` minutes, prepended with the observation
    at t0, stored as a NetCDF file under `outBaseDir`, and finally re-read
    from that file (also on cache hits).

    Returns
    -------
    radar_extrapolation : np.ndarray [time, y, x]
    timestamps : np.ndarray of datetime
    Xcoords, Ycoords : np.ndarray, grid coordinates
    """
    # datetime format
    startValidTime = ti.timestring2datetime(startValidTimeStr)
    endValidTime = ti.timestring2datetime(endValidTimeStr)
    leadTimeMin = int((endValidTime - startValidTime).total_seconds()/60)
    
    # Check if the nc file already exists (cache path: <base>/<YYYY>/<yy><jjj>/)
    year = startValidTime.year
    yearStr =  str(year)[2:4]
    julianDay = startValidTime.timetuple().tm_yday
    julianDayStr = '%03i' % julianDay
    yearJulianStr = yearStr + julianDayStr
    outDir = outBaseDir + startValidTime.strftime("%Y") + '/' + startValidTime.strftime("%y") + julianDayStr + '/'
    fcstName = 'radar-extrapolation_' + startValidTime.strftime("%Y%m%d%H%M") + '_' + str(int(leadTimeMin/60)) + 'hours'
    fcstFile = r'%s' % (outDir + fcstName + '.nc')
    
    # base accumulation is 5 min
    baseAccumMin = 5
    accumFactor = int(newAccumulationMin/baseAccumMin)

    # if not produce forecasts
    if os.path.isfile(fcstFile) == False:
        
        # produce 5-min radar extrapolation
        radar_extrapolation_5min, timestamps_5min = nw.radar_extrapolation(startValidTimeStr,leadTimeMin,finalDomainSize=domainSize,product=product,rainThreshold=rainThreshold)
        # Read the t0 image too (provides coordinates and the t0 frame).
        if product=='RZC':
            r = io.read_bin_image(startValidTimeStr, fftDomainSize=domainSize, inBaseDir = '/scratch/ned/data/')
        else:
            r = io.read_gif_image(startValidTimeStr, fftDomainSize=domainSize, inBaseDir = '/scratch/ned/data/')
        
        if accumFactor>1:
            # convert to mm 
            radar_extrapolation_5min = radar_extrapolation_5min/60*baseAccumMin
            
            # aggregate to new accumulation (to match COSMO1 resolution)
            radar_extrapolation_new = nw.aggregate_in_time(radar_extrapolation_5min,timeAccumMin=newAccumulationMin,type='sum')
            # Keep only the timestamp closing each aggregation window.
            timestamps_new = timestamps_5min[accumFactor-1::accumFactor]
            
            # convert to mm/h
            radar_extrapolation_new = radar_extrapolation_new/newAccumulationMin*60
             
            # get observations at t0 [mm/h]
            radar_observations_t0, _, _ = produce_radar_observation_with_accumulation(startValidTimeStr, startValidTimeStr, newAccumulationMin, domainSize=domainSize, product=product)
            radar_observations_t0 = np.squeeze(radar_observations_t0)[:,:,None]
        else:
            # no need to aggregate forecasts
            radar_extrapolation_new = radar_extrapolation_5min
            timestamps_new = timestamps_5min
        
            # get observations at t0
            radar_observations_t0 = r.rainrate[:,:,None]
            
        # add observation for t0
        radar_extrapolation_new = np.concatenate((radar_observations_t0,radar_extrapolation_new),axis=2)
        timestamps_new.insert(0,startValidTime)
        timestamps_new = np.array(timestamps_new)
            
        # [time,y,x]
        radar_extrapolation_new = np.rollaxis(radar_extrapolation_new,2,0)

        # save netcdf 
        Xcoords = r.subXcoords
        Ycoords = r.subYcoords
        save_3darray_netcdf(fcstFile, radar_extrapolation_new, 'radar_extrapolation',\
                        timestamps_new,Xcoords,Ycoords)

    print('Read: ' + fcstFile)
    # Now read the NetCDF file (also when it was just produced above)
    radar_extrapolation, timestamps, Xcoords, Ycoords = load_3darray_netcdf(fcstFile)
    
    # Optional visual sanity check (disabled)
    testplot=False
    if testplot:
        n=0
        while n<100:
            n+=1
            for t in xrange(timestamps.shape[0]):
                plt.clf()
                plt.imshow(radar_extrapolation[t,:,:],interpolation ='nearest',vmin=0,vmax=65, extent=[Xcoords.min(), Xcoords.max(), Ycoords.min(), Ycoords.max()])
                plt.title(timestamps[t])
                plt.pause(1)

    return radar_extrapolation, timestamps, Xcoords, Ycoords
def get_cosmoE10min(startValidTimeStr, endValidTimeStr, members = 'all', domainSize=512, outBaseDir='/scratch/ned/data/', cosmoBaseDir='/store/s83/tsm/EXP_TST/611/',rainThreshold=0.08,latencyTimeMin=100,lag=0,overwrite=False,useavailable=True):
    """Load 10-min COSMO-E ensemble precipitation forecasts for a validity window.

    Member NetCDF files are merged into one 4D array, converted to mm/h,
    optionally cut to a square middle domain, cached to a single NetCDF file
    (only when members == 'all'), and thresholded.

    Returns
    -------
    cosmoe_data : np.ndarray [time, member, y, x] in mm/h,
        or (None, None, None, None) when no suitable analysis run is found.
    timestamps, Xcoords, Ycoords
    """
    # datetime format
    startValidTime = ti.timestring2datetime(startValidTimeStr)
    endValidTime = ti.timestring2datetime(endValidTimeStr)
    timebounds = [startValidTime, endValidTime]
    
    # Check if the single nc file already exists
    if members=='all':
        year = startValidTime.year
        yearStr =  str(year)[2:4]
        julianDay = startValidTime.timetuple().tm_yday
        julianDayStr = '%03i' % julianDay
        yearJulianStr = yearStr + julianDayStr
        outDir = outBaseDir + startValidTime.strftime("%Y") + '/' + startValidTime.strftime("%y") + julianDayStr + '/'
        fcstName = 'COSMOE10min_' + startValidTime.strftime("%Y%m%d%H%M") + '_' + endValidTime.strftime("%Y%m%d%H%M") + '_lag' +  str(lag) + '_ltMin' + str(int(latencyTimeMin))
        fcstFile = r'%s' % (outDir + fcstName + '.nc')
    else:
        # Subsets of members are never cached.
        fcstFile = 'donotsavethisfile'
    
    # if not load original forecasts
    if (not os.path.isfile(fcstFile)) or overwrite:
        print(fcstFile + ' not found.')
        
        # Search the most recent analysis run; on failure step back 12 h once.
        analysis_not_found = True
        nloops = 0
        while analysis_not_found and (nloops < 2):
        
            # Get most recent run (or second most recent)
            analysisTimeStr = rfe.find_nearest_forecast(startValidTimeStr, 'cosmo-e', lat_timeMin = latencyTimeMin, lag = lag)
            print('run time: ',analysisTimeStr)
            analysisTime = ti.timestring2datetime(analysisTimeStr)
            
            # list of EPS members to load
            # NOTE(review): rebinds the `members` argument; harmless here since
            # 'all' was already consumed by the cache-file decision above.
            if members == 'all':
                members = range(21)
                 
            # Folder to the nc files
            outDir = cosmoBaseDir + 'FCST' + analysisTime.strftime("%y") + '/' + analysisTime.strftime("%y%m%d%H") + '_611/output/'
            if os.path.isdir(outDir) == False:
                print('Folder not found: ' + outDir)
                if useavailable:
                    # Try the run 12 hours earlier.
                    startValidTime -= datetime.timedelta(hours=12)
                    startValidTimeStr = ti.datetime2timestring(startValidTime)
                    nloops+=1
                else:
                    return None,None,None,None
            else:
                analysis_not_found=False

        if analysis_not_found:
            return None,None,None,None
        
        # Load individual EPS member and merge them in one array
        countm=0
        for member in members:
            thisFcstFile = outDir + 'cosmo-e_TOT_PREC_' + str(member).zfill(3) + '.nc'

            if not os.path.isfile(thisFcstFile):
                # NOTE(review): a single missing member aborts the whole process.
                print('File not found: ' + thisFcstFile)
                sys.exit()
            else:
                print('Read: ' + thisFcstFile)
                
                # read the NetCDF file
                
                # this_member, timestamps, Xcoords, Ycoords = load_3darray_netcdf_with_bounds(thisFcstFile, timebounds, domainSize) # need to fix this...
                this_member, timestamps, Xcoords, Ycoords = load_3darray_netcdf(thisFcstFile)
                
                # print(timebounds)
                # print(this_member.shape)
                
                # flip and merge together the members
                if member == members[0]:
                    # Allocate the 4D output on the first member.
                    if domainSize>0:
                        cosmoe_data = np.zeros((this_member.shape[0], len(members), domainSize, domainSize))
                    else:
                        cosmoe_data = np.zeros((this_member.shape[0], len(members),this_member.shape[2],this_member.shape[3]))
                for i in xrange(this_member.shape[0]):
                    # flip frames
                    this_frame = np.flipud(this_member[i,0,:,:])
                    if domainSize>0:
                        cosmoe_data[i,countm,:,:] = dt.extract_middle_domain(this_frame, domainSize, domainSize)
                    else:
                        cosmoe_data[i,countm,:,:] = this_frame
                del this_member
            countm+=1
            
        # convert to mm/h (presumably 10-min accumulations x 6 — TODO confirm)
        cosmoe_data = cosmoe_data*6 
        
        # Get coordinates of reduced domain
        if domainSize>0:
            extent = dt.get_reduced_extent(Xcoords.shape[0], Ycoords.shape[0], domainSize, domainSize)
            Xmin = Xcoords[extent[0]]
            Ymin = Ycoords[extent[1]]
            Xmax = Xcoords[extent[2]]
            Ymax = Ycoords[extent[3]]    
            extent = (Xmin, Xmax, Ymin, Ymax)
            Xcoords = np.arange(Xmin,Xmax,1000)
            Ycoords = np.arange(Ymin,Ymax,1000) 
        else:
            extent = (Xcoords.min(), Xcoords.max(), Ycoords.min(), Ycoords.max())
        # Extract timestamps within the validity bounds
        idxKeep1 = timestamps >= timebounds[0]
        idxKeep2 = timestamps <= timebounds[1]
        idxKeep = np.logical_and(idxKeep1,idxKeep2)
        cosmoe_data = cosmoe_data[idxKeep,:,:,:]
        timestamps = timestamps[idxKeep]
        
        # and store it before loading it again
        if (not fcstFile=='donotsavethisfile'):
            # print(fcstFile,cosmoe_data.shape,timestamps.shape,Xcoords.shape,Ycoords.shape)
            save_4darray_netcdf(fcstFile, cosmoe_data, 'COSMO-E10min',\
                            timestamps,cosmoe_data.shape[1],Xcoords,Ycoords)
    else:
        print('Read: ' + fcstFile)
        # Now read the NetCDF file
        cosmoe_data, timestamps, members, Ycoords, Xcoords = load_4darray_netcdf(fcstFile)
            
    # Apply rain threshold
    cosmoe_data[cosmoe_data<=rainThreshold] = 0
       
    # Optional visual sanity check (disabled)
    testplot=False
    if testplot:
        nmember=0
        n=0
        while n<100:
            n+=1
            for t in xrange(timestamps.shape[0]):
                plt.clf()
                plt.imshow(10*np.log10(cosmoe_data[t,nmember,:,:]),interpolation ='nearest',vmin=-12,vmax=20, extent=[Xcoords.min(), Xcoords.max(), Ycoords.min(), Ycoords.max()])
                plt.title(timestamps[t])
                plt.pause(1)

    return cosmoe_data, timestamps, Xcoords, Ycoords   
def get_lagged_cosmo1(startValidTimeStr, endValidTimeStr, domainSize=512, leadTimeMin = 6*60, outBaseDir='/scratch/ned/data/', rainThreshold=0.08):
    """Build a time-lagged ensemble of COSMO-1 precipitation forecasts.

    Each member is a deterministic COSMO-1 run started at a different
    analysis time but valid over [startValidTimeStr, endValidTimeStr].
    Missing NetCDF extractions are produced on the fly with fieldextra.

    Returns
    -------
    cosmo1_lagged_data : np.ndarray [time, member, y, x], rain rate in mm/h
    timestamps : np.ndarray of datetime (valid times kept)
    subXcoords, subYcoords : np.ndarray, coordinates of the output domain
    """
    # get all individual forecasts' starting times
    latencyTimeHr = 1#100/60
    maxLeadTimeHr=33
    fcstStarts,fcstMembers,fcstLeadTimeStartsMin,fcstLeadTimeStopsMin = rf1.get_lagged_ensemble_members(startValidTimeStr, endValidTimeStr, latencyTimeHr, maxLeadTimeHr)
    nmembers = len(fcstMembers)
    
    # if necessary, extract them with fieldextra
    filesOut = []
    analysisTimes = []
    for n in xrange(nmembers):
        print('----------------------------------------------------')
        print(fcstMembers[n] + ', analysis time: ' + fcstStarts[n].strftime("%Y%m%d%H%M%S") + ', ' + str(fcstLeadTimeStartsMin[n]/60) + ' to ' + str(fcstLeadTimeStopsMin[n]/60) + ' hours.')
        
        # Build the cache directory name from year + julian day.
        year = fcstStarts[n].year
        yearStr =  str(year)[2:4]
        julianDay = fcstStarts[n].timetuple().tm_yday
        julianDayStr = '%03i' % julianDay
        yearJulianStr = yearStr + julianDayStr
        outDir = outBaseDir + fcstStarts[n].strftime("%Y") + '/' + fcstStarts[n].strftime("%y") + julianDayStr + '/'
        
        # Look for an already-extracted nc file with sufficient lead time.
        it_exists = False
        LeadTimeToCheck = fcstLeadTimeStopsMin[n]/60
        while (it_exists == False) and (LeadTimeToCheck <= maxLeadTimeHr):
            fcstName = fcstStarts[n].strftime("%Y%m%d%H%M") + '_' + str(int(LeadTimeToCheck)) + 'hours'
            fcstFile = r'%s' % (outDir + 'cosmo-1_TOT_PREC_' + fcstName + '.nc')
            if os.path.isfile(fcstFile) == False:
                LeadTimeToCheck += 1
            else:
                it_exists = True
                break
        
        # Not cached: run fieldextra (lead time rounded up to a 12 h multiple).
        if it_exists == False:
            LeadTimeToGetHrs = np.minimum(maxLeadTimeHr, np.ceil(( fcstLeadTimeStopsMin[n]/60 )/12)*12)
            fcstFile = rf1.run_fieldextra_c1(fcstStarts[n].strftime("%Y%m%d%H%M"),LeadTimeToGetHrs*60,outBaseDir=outBaseDir)
        
        print('read: ' + fcstFile)
        
        # Now read the NetCDF file
        cosmo1_data, timestamps, Xcoords, Ycoords = load_3darray_netcdf(fcstFile)
        
        # exclude first time step (it's all NaNs!)
        cosmo1_data=cosmo1_data[1:,:,:]
        timestamps=timestamps[1:]
        
        # convert to mm/h (presumably 10-min accumulations x 6 — TODO confirm)
        cosmo1_data = cosmo1_data*6
        # flip and extract middle domain
        if domainSize>0:
            new_data = np.zeros((cosmo1_data.shape[0],domainSize,domainSize))
        else:
            # bugfix: previously a (T, domainSize, domainSize) array was
            # allocated even for domainSize<=0, which cannot hold full frames.
            new_data = np.zeros_like(cosmo1_data)
        for i in range(cosmo1_data.shape[0]):
            # flip frames
            cosmo1_data[i,:,:] = np.flipud(cosmo1_data[i,:,:])
            # cut domain
            if domainSize>0:
                new_data[i,:,:] = dt.extract_middle_domain(cosmo1_data[i,:,:], domainSize, domainSize)
            else:
                new_data[i,:,:] = cosmo1_data[i,:,:]
        cosmo1_data = new_data
        
        # Apply rain threshold
        cosmo1_data[cosmo1_data<=rainThreshold] = 0 
        
        # Extract timestamps within the validity window
        idxKeep1 = timestamps >= ti.timestring2datetime(startValidTimeStr)
        idxKeep2 = timestamps <= ti.timestring2datetime(endValidTimeStr)
        cosmo1_data = cosmo1_data[idxKeep1*idxKeep2,:,:]
        timestamps = timestamps[idxKeep1*idxKeep2]
        
        # Build 4D array with all members
        if n==0:
            cosmo1_lagged_data = np.zeros((cosmo1_data.shape[0],nmembers,cosmo1_data.shape[1],cosmo1_data.shape[2]))
        cosmo1_lagged_data[:,n,:,:] = cosmo1_data.copy()

    # Get coordinates of reduced domain
    if domainSize>0:
        extent = dt.get_reduced_extent(Xcoords.shape[0], Ycoords.shape[0], domainSize, domainSize)
        Xmin = Xcoords[extent[0]]
        Ymin = Ycoords[extent[1]]
        Xmax = Xcoords[extent[2]]
        Ymax = Ycoords[extent[3]]
    else:
        # bugfix: Xmin/Xmax/Ymin/Ymax were undefined when domainSize<=0,
        # raising NameError below; fall back to the full-domain bounds.
        Xmin, Xmax = Xcoords.min(), Xcoords.max()
        Ymin, Ymax = Ycoords.min(), Ycoords.max()
    extent = (Xmin, Xmax, Ymin, Ymax)
    subXcoords = np.arange(Xmin,Xmax,1000)
    subYcoords = np.arange(Ymin,Ymax,1000)
    
    # Optional visual sanity check (disabled)
    testplot=False
    if testplot:
        nmember=0
        n=0
        while n<100:
            n+=1
            for t in xrange(timestamps.shape[0]):
                plt.clf()
                plt.imshow(cosmo1_lagged_data[t,nmember,:,:],interpolation ='nearest',vmin=0,vmax=65, extent=[subXcoords.min(), subXcoords.max(), subYcoords.min(), subYcoords.max()])
                plt.title(timestamps[t])
                plt.pause(1)
    
    return cosmo1_lagged_data, timestamps, subXcoords, subYcoords
def get_cosmoE(startValidTimeStr, endValidTimeStr, analysisTimeStr = [], domainSize=512, leadTimeMin = 24*60, outBaseDir='/scratch/ned/data/',rainThreshold=0.08, latencyTimeMin=100, overwrite = False):
    """Retrieve COSMO-E ensemble precipitation forecasts over a validity window.

    The forecast of the most recent analysis run (or of `analysisTimeStr` if
    given) is extracted with fieldextra, flipped, optionally cut to a square
    middle domain, thresholded, and restricted to the validity window.

    Returns
    -------
    cosmoe_data : np.ndarray [time, member, y, x] in mm/h
    timestamps : np.ndarray of datetime
    subXcoords, subYcoords : np.ndarray, coordinates of the output domain
    """
    # Get most recent run (unless an analysis time is given explicitly)
    if len(analysisTimeStr)==0: 
        analysisTimeStr = rfe.find_nearest_forecast(startValidTimeStr, 'cosmo-e', lat_timeMin = latencyTimeMin)
    analysisTime = ti.timestring2datetime(analysisTimeStr)
    leadtimeHrs = int (( ti.timestring2datetime(endValidTimeStr) - analysisTime ).total_seconds()/3600 )
    
    # Extract (or re-use) the forecast file with fieldextra.
    outFile = rfe.run_fieldextra_forecast(analysisTimeStr, leadtimeHrs, fieldName='TOT_PREC', outBaseDir = '/scratch/ned/data/', modelName='cosmo-e', deltaMin = 60, overwrite = overwrite)
        
    print('Read: ' + outFile)
    
    # Now read the NetCDF file
    cosmoe_data, timestamps, members, Ycoords, Xcoords = load_4darray_netcdf(outFile)

    # flip and extract middle domain
    if domainSize>0:
        new_data = np.zeros((cosmoe_data.shape[0], cosmoe_data.shape[1],domainSize,domainSize))
    else:
        # bugfix: previously a (T, M, domainSize, domainSize) array was
        # allocated even for domainSize<=0, which cannot hold full frames.
        new_data = np.zeros_like(cosmoe_data)
    for i in xrange(cosmoe_data.shape[0]):
        for m in xrange(cosmoe_data.shape[1]):
            # flip frames
            cosmoe_data[i,m,:,:] = np.flipud(cosmoe_data[i,m,:,:])
            # cut domain
            if domainSize>0:
                new_data[i,m,:,:] = dt.extract_middle_domain(cosmoe_data[i,m,:,:], domainSize, domainSize)
            else:
                new_data[i,m,:,:] = cosmoe_data[i,m,:,:]
    cosmoe_data = new_data
    
    # Apply rain threshold
    cosmoe_data[cosmoe_data<=rainThreshold] = 0 
    
    # Get coordinates of reduced domain
    if domainSize>0:
        extent = dt.get_reduced_extent(Xcoords.shape[0], Ycoords.shape[0], domainSize, domainSize)
        Xmin = Xcoords[extent[0]]
        Ymin = Ycoords[extent[1]]
        Xmax = Xcoords[extent[2]]
        Ymax = Ycoords[extent[3]]
    else:
        # bugfix: Xmin/Xmax/Ymin/Ymax were undefined when domainSize<=0,
        # raising NameError below; fall back to the full-domain bounds.
        Xmin, Xmax = Xcoords.min(), Xcoords.max()
        Ymin, Ymax = Ycoords.min(), Ycoords.max()
    extent = (Xmin, Xmax, Ymin, Ymax)
    subXcoords = np.arange(Xmin,Xmax,1000)
    subYcoords = np.arange(Ymin,Ymax,1000)

    # Extract timestamps within the validity window
    idxKeep1 = timestamps >= ti.timestring2datetime(startValidTimeStr)
    idxKeep2 = timestamps <= ti.timestring2datetime(endValidTimeStr)
    cosmoe_data = cosmoe_data[idxKeep1*idxKeep2,:,:,:]
    timestamps = timestamps[idxKeep1*idxKeep2]
    
    # Optional visual sanity check (disabled)
    testplot=False
    if testplot:
        nmember=0
        n=0
        while n<100:
            n+=1
            for t in xrange(timestamps.shape[0]):
                plt.clf()
                plt.imshow(cosmoe_data[t,nmember,:,:],interpolation ='nearest',vmin=0,vmax=65, extent=[subXcoords.min(), subXcoords.max(), subYcoords.min(), subYcoords.max()])
                plt.title(timestamps[t])
                plt.pause(1)
    
    return cosmoe_data, timestamps, subXcoords, subYcoords
def get_ensemble_radar_extrapolation(startValidTimeStr,endValidTimeStr, NumberMembers = 2, NumberLevels = 1, newAccumulationMin=10, rainThreshold = 0.08, domainSize=512, local_level = 0, seed = 42, product='RZC', outBaseDir='/scratch/ned/data/'):
    """
    Produce (or read from a NetCDF cache) an ensemble radar-extrapolation nowcast.

    The nowcast is generated at 5-min resolution, optionally aggregated to
    ``newAccumulationMin``, prepended with the radar observation at t0, saved
    as NetCDF under ``outBaseDir`` and finally read back from that file.

    Parameters
    ----------
    startValidTimeStr, endValidTimeStr : str
        Validity period as YYYYMMDDHHMM time strings.
    NumberMembers : int
        Number of ensemble members to generate.
    NumberLevels : int
        Number of cascade levels for the stochastic extrapolation.
    newAccumulationMin : int
        Target accumulation time [min]; the base product is 5 min.
    rainThreshold : float
        Rain rates <= this value [mm/h] are set to zero.
    domainSize : int
        Side length [km] of the square output domain.
    local_level, seed, product, outBaseDir
        Passed through to the nowcasting / IO helpers.

    Returns
    -------
    ensemble_radar_extrapolation : 4D array [time, member, y, x]
    timestamps : 1D array of datetimes
    members, Xcoords, Ycoords : coordinate vectors read from the NetCDF file
    ensemble_radar_extrapolation_mask : 3D array [time, y, x]
        All-NaN when the mask file is missing.
    """

    # datetime format
    startValidTime = ti.timestring2datetime(startValidTimeStr)
    endValidTime = ti.timestring2datetime(endValidTimeStr)
    leadTimeMin = int((endValidTime - startValidTime).total_seconds()/60)

    # Build the cache file names; every parameter that affects the result is
    # encoded in the file name so different configurations never collide.
    julianDay = startValidTime.timetuple().tm_yday
    julianDayStr = '%03i' % julianDay
    outDir = outBaseDir + startValidTime.strftime("%Y") + '/' + startValidTime.strftime("%y") + julianDayStr + '/'
    fcstName = 'ensemble-radar-extrapolation_' + startValidTime.strftime("%Y%m%d%H%M") + '_' + str(int(leadTimeMin/60)) + 'hours' + '_' + str(NumberMembers) + 'members' + '_' + str(NumberLevels) + 'levels' + '_' + str(newAccumulationMin) + 'min' + '_' + product + '_' + str(domainSize) + 'km' + '_' + str(local_level) + 'local_seed' + str(seed)
    fcstNameMask =  fcstName + '_mask'
    fcstFile = r'%s' % (outDir + fcstName + '.nc')
    fcstFileMask = r'%s' % (outDir + fcstNameMask + '.nc')

    # base accumulation is 5 min
    baseAccumMin = 5
    accumFactor = int(newAccumulationMin/baseAccumMin)

    # Produce the forecast only if it is not cached yet
    if not os.path.isfile(fcstFile):

        # produce 5-min radar extrapolation
        radar_extrapolation_5min, timestamps_5min, radarMask_5min = nw.probabilistic_radar_extrapolation(startValidTimeStr,leadTimeMin,finalDomainSize=domainSize,NumberMembers=NumberMembers,NumberLevels=NumberLevels,product=product,local_level=local_level,seed=seed)
        # read the observation at t0 (RZC comes as binary, other products as gif)
        if product=='RZC':
            r = io.read_bin_image(startValidTimeStr, fftDomainSize=domainSize, product=product, inBaseDir = '/scratch/ned/data/')
        else:
            r = io.read_gif_image(startValidTimeStr, fftDomainSize=domainSize, product=product, inBaseDir = '/scratch/ned/data/')
        if accumFactor>1:
            # convert rain rates [mm/h] to 5-min depths [mm] before summing
            radar_extrapolation_5min = radar_extrapolation_5min/60*baseAccumMin

            # aggregate to new accumulation (to match COSMO1 resolution),
            # member by member
            for m in range(NumberMembers):
                radar_extrapolation_new_member = nw.aggregate_in_time(radar_extrapolation_5min[:,:,:,m],timeAccumMin=newAccumulationMin,type='sum')
                if m==0:
                    # allocate the output stack once the aggregated shape is known
                    radar_extrapolation_new = np.zeros((radar_extrapolation_new_member.shape[0],radar_extrapolation_new_member.shape[1],radar_extrapolation_new_member.shape[2],NumberMembers))
                radar_extrapolation_new[:,:,:,m] = radar_extrapolation_new_member

            # keep the timestamp/mask of the last 5-min step of each new interval
            timestamps_new = timestamps_5min[accumFactor-1::accumFactor]
            radarMask_new = radarMask_5min[:,:,accumFactor-1::accumFactor]
            # convert depths [mm] back to rain rates [mm/h]
            radar_extrapolation_new = radar_extrapolation_new/newAccumulationMin*60

            # get observations at t0 [mm/h]
            radar_observations_t0, _, _ = produce_radar_observation_with_accumulation(startValidTimeStr, startValidTimeStr, newAccumulationMin, domainSize, product=product)
            radar_observations_t0 = np.squeeze(radar_observations_t0)
            radarmask_t0 = r.mask.copy()
            radarmask_t0 = np.array(np.isnan(radarmask_t0),dtype=int)
        else:
            # no need to aggregate forecasts
            radar_extrapolation_new = radar_extrapolation_5min
            timestamps_new = timestamps_5min
            radarMask_new = radarMask_5min

            # get observations at t0
            radar_observations_t0 = r.rainrate.copy()
            radarmask_t0 = r.mask.copy()
            radarmask_t0 = np.array(np.isnan(radarmask_t0),dtype=int)

        # replicate the t0 observation for every member
        radar_observations_t0_allmembers = np.zeros((radar_observations_t0.shape[0],radar_observations_t0.shape[1],NumberMembers))
        for m in range(NumberMembers):
            radar_observations_t0_allmembers[:,:,m] = radar_observations_t0.copy()

        radar_observations_t0_allmembers = radar_observations_t0_allmembers[:,:,None,:]
        radarmask_t0 = radarmask_t0[:,:,None]
        # prepend the observation for t0 along the time axis
        radar_extrapolation_new = np.concatenate((radar_observations_t0_allmembers,radar_extrapolation_new),axis=2)
        radarMask_new = np.concatenate((radarmask_t0,radarMask_new),axis=2)
        # NOTE(review): assumes timestamps_new is a Python list here (insert
        # would fail on a numpy array) — verify against nw helpers.
        timestamps_new.insert(0,startValidTime)
        timestamps_new = np.array(timestamps_new)

        # reorder axes from [y,x,time,member] to [time,member,y,x]
        radar_extrapolation_new = np.rollaxis(radar_extrapolation_new,2,0)
        radar_extrapolation_new = np.rollaxis(radar_extrapolation_new,3,1)
        radarMask_new = np.rollaxis(radarMask_new,2,0)

        # Apply rain threshold
        radar_extrapolation_new[radar_extrapolation_new<=rainThreshold] = 0

        # save netcdf
        Xcoords = r.subXcoords
        Ycoords = r.subYcoords
        save_4darray_netcdf(fcstFile, radar_extrapolation_new, 'ensemble_radar_extrapolation',\
                        timestamps_new,NumberMembers,Xcoords,Ycoords)
        save_3darray_netcdf(fcstFileMask, radarMask_new, 'ensemble_radar_extrapolation_mask',\
                        timestamps_new,Xcoords,Ycoords)

    print('Read: ' + fcstFile)
    # Now read the NetCDF file (single code path whether freshly produced or cached)
    ensemble_radar_extrapolation, timestamps, members, Ycoords, Xcoords = load_4darray_netcdf(fcstFile)
    if os.path.isfile(fcstFileMask):
        ensemble_radar_extrapolation_mask, _, _, _ = load_3darray_netcdf(fcstFileMask)
    else:
        print('Radar mask file not found.')
        # fall back to an all-NaN mask of the right shape
        ensemble_radar_extrapolation_mask = np.zeros((timestamps.size,ensemble_radar_extrapolation.shape[2],ensemble_radar_extrapolation.shape[3]))*np.nan

    return ensemble_radar_extrapolation, timestamps, members, Xcoords, Ycoords, ensemble_radar_extrapolation_mask
# Parse input arguments
# NOTE(review): truncated script fragment — the trailing 'else' branch is cut
# off mid-logic in this chunk.
parser = argparse.ArgumentParser(description='Create time periods to pass to the bash script for parallel radar data processing.')
parser.add_argument('-start', default='201601010000', type=str,help='Starting date YYYYMMDDHHmmSS.')
parser.add_argument('-end', default='201601310000', type=str,help='Starting date YYYYMMDDHHmmSS.')
parser.add_argument('-n', default=-1, type=int,help='Number of periods.')
parser.add_argument('-days', default=-1, type=int,help='Periods interval length [days].')
parser.add_argument('-accum', default=5, type=int,help='Accumulation time of products.')
parser.add_argument('-start0', default=1, type=int,help='Whether to start the periods at 0 time or args.accum later.')

args = parser.parse_args()

# Exactly one of -n (number of periods) or -days (period length) must be given
if (args.n == -1) & (args.days == -1):
    print('You have to define either the number of periods -p or the interval length in days -days')
    sys.exit(1)

timeStart = ti.timestring2datetime(args.start)
timeEnd = ti.timestring2datetime(args.end)

# Compute total duration of the period
totalDurationSec = (timeEnd - timeStart).total_seconds()
timeDeltaAccum = datetime.timedelta(minutes = args.accum)

###### Get time stamps based on number of intervals (not tested)
if (args.n != -1) & (args.days == -1):
    # Compute duration of the period intervals
    durationPeriodSec = int(totalDurationSec/args.n)
    if durationPeriodSec >= totalDurationSec:
        # requested intervals longer than the whole period: single period
        numberPeriods = 1
        timeStampsArray = np.array([args.start,args.end])
    else:
        timeStamps = []
Ejemplo n.º 15
0
# NOTE(review): script fragment — 'args', 'ti', 'resKm', 'usrName' and
# 'timeAccumMin' are defined elsewhere in the full script; verify there.
advectionScheme = args.adv
frameRate = args.frameRate
product = args.product
leadtime = args.leadtime
# Stacking window: never shorter than 5 min
timewindow = np.max((5,args.stack))

# Sanity-check the -start timestamp (must lie between 1980 and 2030)
if (int(args.start) < 198001010000) or (int(args.start) > 203001010000):
    print('Invalid -start time arguments.')
    sys.exit(1)
else:
    timeStartStr = args.start
    


######## Get datetime from timestamp
timeStart = ti.timestring2datetime(timeStartStr)
timeAccumMinStr = '%05i' % timeAccumMin
timeAccum24hStr = '%05i' % (24*60)    

######## GIS stuff
# Limits of CCS4 domain (Swiss coordinates, metres)
Xmin = 255000
Xmax = 965000
Ymin = -160000
Ymax = 480000
allXcoords = np.arange(Xmin,Xmax+resKm*1000,resKm*1000)
allYcoords = np.arange(Ymin,Ymax+resKm*1000,resKm*1000)

# Shapefile filename (Swiss country border) and WGS84 projection string
fileNameShapefile = "/users/" + usrName + "/pyscripts/shapefiles/CHE_adm0.shp"
proj4stringWGS84 = "+proj=longlat +ellps=WGS84 +datum=WGS84"
# NOTE(review): second script fragment (reuses a 'parser' defined earlier);
# the while-loop below is cut off mid-body in this chunk.
args = parser.parse_args()

# Sanity-check the -start timestamp (must lie between 1980 and 2030)
if (int(args.start) < 198001010000) or (int(args.start) > 203001010000):
    print('Invalid -start or -end time arguments.')
    sys.exit(1)
else:
    timeStartStr = args.start
    timeEndStr = args.end

product = args.product
timeAccumMin = args.accum
timeAccumMinStr = '%05i' % timeAccumMin
username = args.username
################################

timeStart = ti.timestring2datetime(timeStartStr)
timeEnd = ti.timestring2datetime(timeEndStr)

##### LOOP OVER FILES #############
# NOTE(review): time.clock() was removed in Python 3.8 — use
# time.perf_counter() when porting.
tic = time.clock()
timeLocal = timeStart
while timeLocal <= timeEnd:
    year = timeLocal.year
    hour = timeLocal.hour
    minute = timeLocal.minute
    # Get Julian day
    julianDay = ti.get_julianday(timeLocal)

    # Create filename
    yearStr =  str(year)[2:4]
    julianDayStr = '%03d' % julianDay
def get_cosmo1(startValidTimeStr, endValidTimeStr,domainSize=512, leadTimeMin = 12*60, outBaseDir='/scratch/ned/data/',rainThreshold=0.08):
    """
    Load (producing it first if necessary) a COSMO-1 precipitation forecast
    covering the validity period [startValidTimeStr, endValidTimeStr].

    The most recent model run is located, its TOT_PREC output is converted to
    rain rates, flipped north-up, cropped to a square middle domain, thresholded
    and finally subset to the requested timestamps.

    Parameters
    ----------
    startValidTimeStr, endValidTimeStr : str
        Validity period as YYYYMMDDHHMM time strings.
    domainSize : int
        Side length [km] of the square output domain; <= 0 keeps the full domain.
    leadTimeMin : int
        Forecast horizon [min] requested from fieldextra.
    outBaseDir : str
        Root directory of the NetCDF cache.
    rainThreshold : float
        Rain rates <= this value [mm/h] are set to zero.

    Returns
    -------
    cosmo1_data : 3D array [time, y, x] of rain rates [mm/h]
    timestamps : 1D array of datetimes within the requested period
    subXcoords, subYcoords : 1D coordinate vectors [m] of the output domain
    """

    # Get most recent run covering the start time
    analysisTimeStr = rf1.find_nearest_run_cosmo1(startValidTimeStr)
    analysisTime = ti.timestring2datetime(analysisTimeStr)

    # Build the expected NetCDF cache file name
    julianDay = analysisTime.timetuple().tm_yday
    julianDayStr = '%03i' % julianDay
    outDir = outBaseDir + analysisTime.strftime("%Y") + '/' + analysisTime.strftime("%y") + julianDayStr + '/'
    fcstName = analysisTime.strftime("%Y%m%d%H%M") + '_' + str(int(leadTimeMin/60)) + 'hours'
    fcstFile = r'%s' % (outDir + 'cosmo-1_TOT_PREC_' + fcstName + '.nc')

    # If the file is not cached yet, call fieldextra to produce it
    if not os.path.isfile(fcstFile):
        rf1.run_fieldextra_c1(analysisTimeStr,leadTimeMin,outBaseDir=outBaseDir)
    print('Read: ' + fcstFile)

    # Now read the NetCDF file
    cosmo1_data, timestamps, Xcoords, Ycoords = load_3darray_netcdf(fcstFile)

    # exclude first time step (it's all NaNs!)
    cosmo1_data = cosmo1_data[1:,:,:]
    timestamps = timestamps[1:]

    # convert 10-min accumulations [mm] to rain rates [mm/h]
    cosmo1_data = cosmo1_data*6

    # flip frames north-up
    for i in range(cosmo1_data.shape[0]):
        cosmo1_data[i,:,:] = np.flipud(cosmo1_data[i,:,:])

    if domainSize > 0:
        # cut out the middle square domain
        new_data = np.zeros((cosmo1_data.shape[0],domainSize,domainSize))
        for i in range(cosmo1_data.shape[0]):
            new_data[i,:,:] = dt.extract_middle_domain(cosmo1_data[i,:,:], domainSize, domainSize)
        cosmo1_data = new_data
        # Get coordinates of reduced domain
        extent = dt.get_reduced_extent(Xcoords.shape[0], Ycoords.shape[0], domainSize, domainSize)
        Xmin = Xcoords[extent[0]]
        Ymin = Ycoords[extent[1]]
        Xmax = Xcoords[extent[2]]
        Ymax = Ycoords[extent[3]]
    else:
        # keep the full domain (the original code referenced undefined
        # Xmin/Xmax/Ymin/Ymax on this path and would have raised NameError)
        Xmin, Xmax = Xcoords.min(), Xcoords.max()
        Ymin, Ymax = Ycoords.min(), Ycoords.max()
    subXcoords = np.arange(Xmin,Xmax,1000)
    subYcoords = np.arange(Ymin,Ymax,1000)

    # Apply rain threshold
    cosmo1_data[cosmo1_data<=rainThreshold] = 0

    # Keep only timestamps within the requested validity period
    idxKeep = (timestamps >= ti.timestring2datetime(startValidTimeStr)) \
              & (timestamps <= ti.timestring2datetime(endValidTimeStr))
    cosmo1_data = cosmo1_data[idxKeep,:,:]
    timestamps = timestamps[idxKeep]

    return cosmo1_data, timestamps, subXcoords, subYcoords