def get_HRRR_value(validDATE, variable, fxx):
    """
    Retrieve a HRRR grid valid at `validDATE` and return only the values
    (no latitude/longitude arrays).

    For 'UVGRD' requests the wind speed is computed from the U and V
    components. If the file could not be retrieved the value is a scalar
    nan (shape ()); in that case None is returned so callers can filter
    failed hours out of a list of grids.
    """
    run_date = validDATE - timedelta(hours=fxx)

    if variable.split(':')[0] == 'UVGRD':
        # Wind speed must be derived from the two component grids.
        u = get_hrrr_variable(run_date, 'UGRD', fxx=fxx, verbose=False)['value']
        v = get_hrrr_variable(run_date, 'VGRD', fxx=fxx, verbose=False)['value']
        data = wind_uv_to_spd(u, v)
    else:
        data = get_hrrr_variable(run_date, variable, fxx=fxx, verbose=False)['value']

    # A scalar (shape ()) means the download failed and a nan came back.
    if np.shape(data) != ():
        return data
    print("!! WARNING !! COULD NOT GET %s %s f%02d" % (variable, run_date, fxx))
    return None
def get_HRRR_value(validDATE):
    """
    Return the HRRR values (no lat/lon) for the grid valid at `validDATE`.

    NOTE: this variant reads `variable` and `fxx` from module-level globals —
    it is shaped as a multiprocessing worker that receives only the valid
    date. Returns None when the file could not be retrieved (the value is a
    scalar nan), so failed hours can be filtered out later.
    """
    run_date = validDATE - timedelta(hours=fxx)

    if variable[:2] == 'UV':
        # Wind speed is derived from the U and V components at this level.
        level = variable.split(':')[-1]
        u = get_hrrr_variable(run_date, 'UGRD:' + level, fxx=fxx, model='hrrr',
                              field='sfc', value_only=True, verbose=False)
        v = get_hrrr_variable(run_date, 'VGRD:' + level, fxx=fxx, model='hrrr',
                              field='sfc', value_only=True, verbose=False)
        if np.shape(u['value']) == () or np.shape(v['value']) == ():
            print("!! WARNING !! COULD NOT GET %s %s f%02d" % (variable, run_date, fxx))
            return None
        return wind_uv_to_spd(u['value'], v['value'])

    grid = get_hrrr_variable(run_date, variable, fxx=fxx, model='hrrr',
                             field='sfc', value_only=True, verbose=False)
    if np.shape(grid['value']) == ():
        # Scalar nan means the file was missing.
        print("!! WARNING !! COULD NOT GET %s %s f%02d" % (variable, run_date, fxx))
        return None
    return grid['value']
def spread(validDATE, variable, fxx=range(0, 19), verbose=True):
    """
    Compute the HRRR model spread for a single analysis time.

    The spread is the standard deviation across all forecasts that are
    valid at the same time (a time-lagged ensemble).

    Input:
        validDATE - Datetime object of the analysis hour.
        variable  - HRRR GRIB2 variable string (i.e. 'TMP:2 m').
        fxx       - Range of forecast lead-time hours to include in the
                    spread calculation. Default: analysis through F18.
        verbose   - If True, print how long the computation took.
    Return:
        2D numpy array of the standard deviation at each grid point.
    """
    timer = datetime.now()

    # List of (runDATE, fxx) pairs that correspond to the validDATE:
    # the run initialized f hours earlier with an f-hour lead time.
    run_fxx = [(validDATE - timedelta(hours=f), f) for f in fxx]

    # Download all forecasts for the validDATE from Pando.
    H = np.array([
        get_hrrr_variable(z[0], variable, fxx=z[1], verbose=False,
                          value_only=True)['value'] for z in run_fxx
    ])

    spread = np.std(H, axis=0)

    if verbose:
        # BUG FIX: was `datetime.now - timer` (method object minus datetime),
        # which raised TypeError whenever verbose=True.
        print('Timer for HRRR_Spread.spread():', datetime.now() - timer)

    return spread
def RMSD(validDATE, variable, FORECASTS=range(19), verbose=True):
    """
    Root-mean-square difference between all HRRR forecasts valid at one time.

    Example period: datetime(2018, 8, 12, 21), convection over the
    Coal Hollow Fire, with variable = 'CAPE:surface'.

    Input:
        validDATE - Datetime of the valid time to evaluate.
        variable  - HRRR GRIB2 variable string. 'UVGRD' requests use the
                    derived 'SPEED' field.
        FORECASTS - Forecast lead hours to compare.
        verbose   - Print the shape of the stacked forecast array.
    Return:
        Dict with the HRRR lat/lon grids plus 'RMSD', 'variable', 'DATE',
        and 'normalized RMSD by range'.
    """
    # Load every forecast grid valid at this time; missing files come back
    # as scalar nan (shape ()) and are filtered out.
    if variable.split(':')[0] == 'UVGRD':
        raw = np.array([get_hrrr_variable(validDATE - timedelta(hours=f), variable,
                                          fxx=f, value_only=True, verbose=False)['SPEED']
                        for f in FORECASTS])
        grids = np.array([g for g in raw if np.shape(g) != ()])
    else:
        raw = [get_hrrr_variable(validDATE - timedelta(hours=f), variable,
                                 fxx=f, value_only=True, verbose=False)['value']
               for f in FORECASTS]
        grids = np.array([g for g in raw if np.shape(g) != ()])

    if verbose:
        print(np.shape(grids))

    # Differences between every unique forecast pair (no double counting).
    n = len(grids)
    diffs = np.array([grids[i] - grids[j]
                      for i in range(n)
                      for j in range(n) if i - j > 0])
    rmsd = np.sqrt(np.mean(diffs**2, axis=0))

    # Normalized RMSD, normalized by the max-min range across forecasts.
    # A zero range would divide by zero, so those points become nan.
    hi, lo = np.percentile(grids, [100, 0], axis=0)
    spread_range = hi - lo
    spread_range[spread_range == 0] = np.nan
    normalized = rmsd / spread_range

    # Attach results to the grid lat/lon dictionary and return.
    latlon = get_hrrr_latlon()
    latlon['RMSD'] = rmsd
    latlon['variable'] = variable
    latlon['DATE'] = validDATE
    latlon['normalized RMSD by range'] = normalized
    return latlon
def member_multipro(inputs):
    """
    Multiprocessing worker for `member`: return a boolean exceedance grid.

    inputs - sequence of [validDATE, fxx, threshold, variable, threshold].
    Returns H['value'] >= threshold, a boolean array the same shape as the
    HRRR grid.

    NOTE(review): `threshold` is assigned twice — inputs[2] is immediately
    clobbered by inputs[4]. The sibling `member_multipro` in this codebase
    unpacks inputs[4] as `radius`, so this looks like a copy/paste slip;
    confirm which element the callers actually pack as the threshold.
    """
    validDATE = inputs[0]
    f = inputs[1]            # forecast lead time (hours)
    threshold = inputs[2]
    variable = inputs[3]
    threshold = inputs[4]    # overwrites inputs[2] — see NOTE above
    #
    # The model run that is valid at validDATE with an f-hour lead time.
    runDATE = validDATE - timedelta(hours=f)
    H = get_hrrr_variable(runDATE, variable, fxx=f)
    return H['value'] >= threshold
def RMSD_range_MP(inputs):
    """
    Multiprocessing worker replacing the RMSD-over-a-range for loop.

    inputs - (validDATE, variable, FORECASTS) tuple.
    Downloads every forecast valid at validDATE, forms the differences of
    every unique forecast pair, and returns
    [pair_count, sum_of_squared_differences] so the caller can accumulate
    an RMSD over many valid times.
    """
    validDATE, variable, FORECASTS = inputs

    # Missing files come back as scalar nan (shape ()) and are filtered out.
    if variable.split(':')[0] == 'UVGRD':
        # Derived wind speed is stored under the 'SPEED' key.
        raw = np.array([get_hrrr_variable(validDATE - timedelta(hours=f), variable,
                                          fxx=f, value_only=True, verbose=False)['SPEED']
                        for f in FORECASTS])
        grids = np.array([g for g in raw if np.shape(g) != ()])
    else:
        raw = [get_hrrr_variable(validDATE - timedelta(hours=f), variable,
                                 fxx=f, value_only=True, verbose=False)['value']
               for f in FORECASTS]
        grids = np.array([g for g in raw if np.shape(g) != ()])

    print(validDATE, 'Available Grids: %s/%s' % (len(grids), len(raw)))

    # Differences between every unique forecast pair (no double counting).
    n = len(grids)
    diffs = np.array([grids[i] - grids[j]
                      for i in range(n)
                      for j in range(n) if i - j > 0])

    pair_count = len(diffs)
    sum_of_squares = np.sum(diffs**2, axis=0)

    return [pair_count, sum_of_squares]
def load_lats_lons(model):
    """
    Preload the latitude and longitude grid for a HRRR model domain.

    Input:
        model - One of 'hrrr', 'hrrrX' (CONUS grid) or 'hrrrak' (Alaska grid).
    Return:
        [lats, lons] - the two 2D coordinate arrays.
    Raises:
        ValueError - for an unrecognized model name. (Previously an unknown
        model fell through both branches and raised UnboundLocalError on
        `lats`, which hid the real problem.)
    """
    if model in ['hrrr', 'hrrrX']:
        lats, lons = get_hrrr_latlon(DICT=False)
    elif model == 'hrrrak':
        # Any archived Alaska file works here; we only need its lat/lon grids.
        AK = get_hrrr_variable(datetime(2018, 2, 24, 15), 'TMP:2 m',
                               fxx=0, model='hrrrak', verbose=False)
        lats = AK['lat']
        lons = AK['lon']
    else:
        raise ValueError(
            "model must be 'hrrr', 'hrrrX', or 'hrrrak', got %r" % (model,))
    return [lats, lons]
def forecast_data(runDATE, XY_point_pairs, variable='GUST:surface', fxx=range(19)):
    '''
    Return a dictionary of forecasted values for a series of points.

    Input:
        runDATE        - Python Datetime Object for the model run date
                         interested in.
        XY_point_pairs - XY array coordinate pairs for the points you want.
        variable       - HRRR variable name.
        fxx            - Forecasts to include in the output.
    Return:
        Dict with 'lat' and 'lon' arrays for the points plus one
        'fXX' array of values per requested forecast hour.
    '''
    ## First, get the latitude/longitude values that correspond to the
    ## XY grid-index points.
    hLATLON = get_hrrr_latlon()
    ulon = np.array([hLATLON['lon'][m, n] for m, n in XY_point_pairs])
    ulat = np.array([hLATLON['lat'][m, n] for m, n in XY_point_pairs])

    return_this = {'lat': ulat, 'lon': ulon}

    ## Next, get the forecasts for each of the points.
    for f in fxx:
        print('\rworking on f%02d' % f, end="")
        # BUG FIX: this call previously hard-coded variable='GUST:surface',
        # silently ignoring the `variable` argument for every forecast hour.
        H = get_hrrr_variable(runDATE, variable=variable, fxx=f, verbose=False)
        # Pull out only the requested points from the full grid.
        vv = np.array([H['value'][m, n] for m, n in XY_point_pairs])
        return_this['f%02d' % f] = vv
    print('\rDONE!')

    return return_this
def member_multipro(inputs):
    """
    Multiprocessing worker for `member`: spatially filtered reflectivity grid.

    inputs - sequence of [validDATE, fxx, threshold, variable, radius].
    Returns the composite-reflectivity grid after two radial
    `ndimage.generic_filter` passes (`first_filter` with the threshold,
    then `second_filter`), both using a circular footprint of `radius`
    grid points.

    NOTE(review): `variable` is unpacked from inputs[3] but never used —
    the HRRR request is hard-coded to 'REFC:entire'. Confirm whether that
    is intentional or the call should use `variable`.
    """
    validDATE = inputs[0]
    f = inputs[1]          # forecast lead time (hours)
    threshold = inputs[2]  # reflectivity threshold passed to first_filter
    variable = inputs[3]   # unused — see NOTE above
    radius = inputs[4]     # footprint radius in grid points
    # The model run that is valid at validDATE with an f-hour lead time.
    runDATE = validDATE - timedelta(hours=f)
    H = get_hrrr_variable(runDATE, 'REFC:entire', fxx=f)
    # Apply spatial filters
    first = ndimage.generic_filter(H['value'], first_filter,
                                   footprint=radial_footprint(radius),
                                   extra_keywords={'threshold': threshold})
    second = ndimage.generic_filter(first, second_filter,
                                    footprint=radial_footprint(radius))
    #
    return second
def make_plot(inputs):
    """
    Plot HRRR composite reflectivity, time-lagged lightning-threat contours,
    and (after 1 Jan 2018) GOES GLM flashes around a fire location, then save
    the figure.

    inputs - (fire, m, d) tuple:
        fire - dict with at least 'name', 'latitude', 'longitude'.
        m    - Basemap object to draw on.
        d    - valid datetime for the analysis.

    NOTE(review): `c` (the reflectivity colormap dict with 'cmap'/'vmin'/
    'vmax') is read from module scope — confirm it is defined before this
    worker runs.
    """
    fire, m, d = inputs
    plt.cla()
    plt.clf()
    # Reflectivity analysis at the valid time, plus lightning forecasts from
    # earlier runs whose lead times (1, 3, ..., 18 h) are all valid at d.
    Hrefc = get_hrrr_variable(d, 'REFC:entire', verbose=False)
    Hltng_f01 = get_hrrr_variable(d - timedelta(hours=1), 'LTNG:entire', fxx=1, value_only=True, verbose=False)
    Hltng_f03 = get_hrrr_variable(d - timedelta(hours=3), 'LTNG:entire', fxx=3, value_only=True, verbose=False)
    Hltng_f06 = get_hrrr_variable(d - timedelta(hours=6), 'LTNG:entire', fxx=6, value_only=True, verbose=False)
    Hltng_f09 = get_hrrr_variable(d - timedelta(hours=9), 'LTNG:entire', fxx=9, value_only=True, verbose=False)
    Hltng_f12 = get_hrrr_variable(d - timedelta(hours=12), 'LTNG:entire', fxx=12, value_only=True, verbose=False)
    Hltng_f15 = get_hrrr_variable(d - timedelta(hours=15), 'LTNG:entire', fxx=15, value_only=True, verbose=False)
    Hltng_f18 = get_hrrr_variable(d - timedelta(hours=18), 'LTNG:entire', fxx=18, value_only=True, verbose=False)

    # GLM data only exists after 1 Jan 2018; add a third panel for it.
    if d > datetime(2018, 1, 1):
        glm_files = get_GLM_file_nearesttime(d - timedelta(minutes=30), window=90, verbose=False)
        GLM = accumulate_GLM(glm_files)
        fig, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(20, 10))
    else:
        fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 10))

    # Panel 1: composite reflectivity with the fire location circled.
    plt.sca(ax1)
    plt.title('%s\nValid Date: %s' % (fire['name'], Hrefc['valid'].strftime('%d %b %Y %H:%M UTC')))
    m.pcolormesh(Hrefc['lon'], Hrefc['lat'], Hrefc['value'],
                 cmap=c['cmap'], vmin=c['vmin'], vmax=c['vmax'])
    m.drawstates()
    m.drawcounties()
    m.arcgisimage()
    m.scatter(fire['longitude'], fire['latitude'], latlon=True,
              facecolors='none', edgecolors='w', s=100)

    # Panel 2: lightning-threat contours from each lead time.
    plt.sca(ax2)
    # Since contours are made every 0.05, this represents approx.
    # 1/2 flash in the hour per km^2 (because 0.05flashes*60min/5min/km^2 =
    # 1.2 flashes/km^2)
    c_interval = .05
    plt.title(
        'Lightning Threat: F01, F03, F06, F09, F12, F15, F18\n(white, blue, green, coral, orange, purple, pink)'
    )
    # Draw longest lead time first so the most recent forecast sits on top.
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f18['value'], latlon=True,
              colors='deeppink', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f15['value'], latlon=True,
              colors='purple', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f12['value'], latlon=True,
              colors='darkorange', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f09['value'], latlon=True,
              colors='coral', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f06['value'], latlon=True,
              colors='lawngreen', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f03['value'], latlon=True,
              colors='dodgerblue', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.contour(Hrefc['lon'], Hrefc['lat'], Hltng_f01['value'], latlon=True,
              colors='white', linewidths=1, levels=np.arange(0, 20, c_interval))
    m.drawstates()
    m.drawcounties()
    m.arcgisimage()
    m.scatter(fire['longitude'], fire['latitude'], latlon=True,
              facecolors='none', edgecolors='w', s=100)

    # Panel 3 (post-2018 only): observed GLM flashes for the previous hour.
    if d > datetime(2018, 1, 1):
        plt.sca(ax3)
        plt.title("GLM flashes for previous hour")
        m.scatter(GLM['longitude'], GLM['latitude'], marker='+', c='yellow')
        m.drawstates()
        m.drawcounties()
        m.arcgisimage()
        m.scatter(fire['longitude'], fire['latitude'], latlon=True,
                  facecolors='none', edgecolors='w', s=100)

    # Size matches the 2- or 3-panel layout chosen above.
    if d > datetime(2018, 1, 1):
        fig.set_size_inches(20, 10)
    else:
        fig.set_size_inches(15, 10)

    # Save under a per-fire directory, creating it on first use.
    SAVEDIR = '/uufs/chpc.utah.edu/common/home/u0553130/public_html/PhD/JFSP/%s/' % fire[
        'name'].replace(' ', '_')
    if not os.path.exists(SAVEDIR):
        os.makedirs(SAVEDIR)
    plt.savefig(SAVEDIR + '%s' % Hrefc['valid'].strftime('valid_%Y%m%d-%H%M'))
for i, D in enumerate(DATES_LIST)] reduce_CPUs = 2 # don't eat all computer resources cpus = np.minimum(multiprocessing.cpu_count() - reduce_CPUs, len(args)) P = multiprocessing.Pool(cpus) all_variances = P.map(mean_spread_MP, args) P.close() mean_spread = np.sqrt(np.mean(all_variances, axis=0)) return all_variances # ================================================================ # ================================================================ m = draw_HRRR_map() LAND = get_hrrr_variable(datetime(2019, 1, 1), 'LAND:surface') VARS = [ 'GUST:surface', 'UVGRD:10 m', 'REFC:entire', 'LTNG:entire', 'CAPE:surface', 'TMP:2 m', 'DPT:2 m', 'HGT:500' ] seasons = { 'DJF': { 'sDATE': datetime(2017, 12, 1), 'eDATE': datetime(2018, 3, 1) }, 'MAM': { 'sDATE': datetime(2018, 3, 1), 'eDATE': datetime(2018, 6, 1) },
if os.path.exists(FILE): m = np.load(FILE, allow_pickle=True).item() print('loaded %s map from file' % name) else: center = (loc['latitude'], loc['longitude']) m = draw_centermap(center, size=(.3, .3)) # Save the map object for later use np.save(FILE, m) print('saved %s map to file' % name) ## Store map object in location dictionary location[name]['map'] = m ## 3) Create a landuse image for each locations ## Create new figure once a day because landuse ice cover changes on lakes if datetime.utcnow().hour == 0: LU = get_hrrr_variable(datetime.now(), 'VGTYP:surface') LU_cmap = LU_MODIS21() for n, (name, loc) in enumerate(location.items()): print('Generating LandUse map for %s' % name) LU_SAVE = '/uufs/chpc.utah.edu/common/home/u0553130/public_html/oper/HRRR_golf/%s/LandUse.png' % name.replace( ' ', '_') plt.figure(1000) loc['map'].pcolormesh(LU['lon'], LU['lat'], LU['value'], cmap=LU_cmap['cmap'], vmin=LU_cmap['vmin'], vmax=LU_cmap['vmax']) cb = plt.colorbar(orientation='vertical', pad=.01, shrink=.95) cb.set_ticks(np.arange(0.5, len(LU_cmap['labels']) + 1)) cb.ax.set_yticklabels(LU_cmap['labels'])
hours_span = 0 if variable in ['UVGRD:10 m', 'WIND:10 m']: units = r'm s$\mathregular{^{-1}}$' elif variable == 'REFC:entire': units = 'dBZ' nep = NEP(validDATE, threshold=threshold, variable=variable, radius=radius, fxx=fxx, hours_span=hours_span) # Get the values at the valid time H = get_hrrr_variable(validDATE, variable, fxx=0) m = draw_centermap(40.77, -111.96, size=(2.5, 3.5)) plt.figure(figsize=[10, 5]) cm = cm_prob() m.pcolormesh(nep['lon'], nep['lat'], nep['prob'], cmap=cm['cmap'], vmax=cm['vmax'], vmin=cm['vmin'], latlon=True) cb = plt.colorbar(pad=.02, shrink=.8) m.contour(H['lon'],
units = 'dBZ' fxx = 0 cmap = reflect_ncdc() elif variable in ['TMP:2 m']: contours = [4, 20] units = 'C' fxx = 0 cmap = cm_temp() elif variable in ['UVGRD:10 m']: contours = [4, 20] units = r'm s$\mathregular{^{-1}}$' fxx = 0 cmap = cm_wind() # Get HRRR Analysis H = get_hrrr_variable(DATE, variable, fxx=fxx) if variable.split(':')[0] == 'UVGRD': masked = H['SPEED'] else: masked = H['value'] masked = np.ma.array(masked) masked[masked <= 0.02] = np.ma.masked if variable.split(":")[0] in ['TMP', 'DPT']: masked -= 273.15 # Plot Images ## Figure 1: RMSD over ABI and GLM # Plot ABI newmap = m.pcolormesh(ABI['lon'], ABI['lat'],
return {'cmap': reflect_ncdc_coltbl, 'vmin': 0, 'vmax': 80, 'units': 'dBZ'} if __name__ == '__main__': cm_dBZ = reflect_ncdc() import sys sys.path.append('/uufs/chpc.utah.edu/common/home/u0553130/pyBKB_v3') from BB_HRRR.HRRR_Pando import get_hrrr_variable from datetime import datetime import numpy as np import matplotlib.pyplot as plt H = get_hrrr_variable(datetime(2018, 6, 8, 1), 'REFC:entire') dBZ = H['value'] dBZ = np.ma.array(dBZ) dBZ[dBZ == -10] = np.ma.masked plt.figure(1) plt.title('custom cmap reflect_ncdc()') plt.pcolormesh(dBZ, cmap=cm_dBZ) plt.colorbar() plt.figure(2) plt.title('gist_ncar') plt.pcolormesh(dBZ, cmap='gist_ncar') plt.colorbar()
validDATE = datetime(2018, 8, 1, 23) # Isolated Thunderstorms threshold = 35 radius = 9 fxx = range(9, 12) variable = 'REFC:entire' hours_span = 1 tle = TLE(validDATE, threshold=threshold, variable='REFC:entire', radius=radius, fxx=fxx, hours_span=hours_span) # Get the values at the valid time, and plus or minus an hour H = get_hrrr_variable(validDATE, 'REFC:entire', fxx=0) # on the hr m = draw_centermap(40.77, -111.96, size=(2.5, 3.5)) plt.figure(figsize=[10, 5]) cm = cm_prob() m.pcolormesh(tle['lon'], tle['lat'], tle['prob'], cmap=cm['cmap'], vmax=cm['vmax'], vmin=cm['vmin'], latlon=True) cb = plt.colorbar(pad=.02) #cb.set_label('Probability Reflectivity > %s dBZ' % threshold)