def main():
    """Plot ensemble-mean reflectivity, vertical velocity, and wind at z = 1000 m AGL.

    Reads the ensemble history files for the experiment named by --exp-name
    and writes one image per output time to images-<exp_name>/.  Times at or
    after t = 14400 s additionally get a flux box overlay.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    fcst_files = glob.glob("/caps1/tsupinie/1km-control-%s/ena???.hdf0*" % args.exp_name)

    # Ensemble-mean reflectivity: the aggregator collapses the member axis.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(fcst_files,
        ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity,
        "/caps1/tsupinie/1km-control-%s/ena001.hdfgrdbas" % args.exp_name,
        {'z': 1000}, agl=True, aggregator=lambda x: np.mean(x, axis=0))

    # Winds are loaded per-member; the mean over members is taken below.
    ens_winds, ens_members, ens_times = loadAndInterpolateEnsemble(fcst_files,
        ['u', 'v', 'w'], getWind,
        "/caps1/tsupinie/1km-control-%s/ena001.hdfgrdbas" % args.exp_name,
        {'z': 1000}, agl=True)

    u_mean = ens_winds['u'].mean(axis=0)
    v_mean = ens_winds['v'].mean(axis=0)
    w_mean = ens_winds['w'].mean(axis=0)

    # First forecast (post-assimilation) time index; assumes 14400 s is present.
    fcst_start_idx = np.where(ens_times == 14400)[0][0]

    for wdt, t_ens in enumerate(ens_times):
        if wdt < fcst_start_idx:
            # Assimilation period: no flux box overlay.
            makeplot(refl_ens_mean[wdt], (u_mean[wdt], v_mean[wdt]), w_mean[wdt], 5, map, goshen_1km_gs,
                r"Ensemble Mean Reflectivity, $w$, and Wind at $z$ = 1000 m and $t$ = %d s" % t_ens,
                "images-%s/ens_mean_%06d.png" % (args.exp_name, t_ens))
        else:
            # Forecast period: overlay the experiment's flux box for this time.
            makeplot(refl_ens_mean[wdt], (u_mean[wdt], v_mean[wdt]), w_mean[wdt], 5, map, goshen_1km_gs,
                r"Ensemble Mean Reflectivity, $w$, and Wind at $z$ = 1000 m and $t$ = %d s" % t_ens,
                "images-%s/ens_mean_%06d.png" % (args.exp_name, t_ens),
                box=flux_boxes[args.exp_name][wdt - fcst_start_idx])
    return
def main():
    """Plot mobile-mesonet observations over observed reflectivity for a set of target times.

    For each target time (seconds after the initial time), the nearest
    earlier radar reflectivity file is paired with the observations valid at
    that time and the pair is rendered to mm_obs_<t>.png.
    """
    _epoch_time = datetime(1970, 1, 1, 0, 0, 0)
    # Experiment initial time (2009-06-05 18 UTC) as seconds since the epoch.
    _initial_time = datetime(2009, 6, 5, 18, 0, 0) - _epoch_time
    _initial_time = (_initial_time.microseconds + (_initial_time.seconds + _initial_time.days * 24 * 3600) * 1e6) / 1e6

    # Offsets (s) from the initial time at which to plot.
    _target_times = [ 1800, 3600, 5400, 7200, 9000, 10800, 11100, 11400, 11700, 12000, 12300, 12600, 12900, 13200, 13500, 13800, 14100, 14400, 14700, 15000, 15300, 15600, 15900, 16200, 16500, 16800, 17100, 17400, 17700, 18000 ]

    # Wind-direction window (degrees) used to classify inflow observations.
    inflow_wd_lbound, inflow_wd_ubound = (100, 240)

#   bounds = (0, slice(90, 210), slice(40, 160))
#   bounds = (0, slice(100, 180), slice(90, 170))
    bounds = (0, slice(115, 140), slice(120, 145))
    # rev_bounds swaps the two spatial slices (index order differs in the HDF data).
    rev_bounds = [ 0 ]
    rev_bounds.extend(bounds[2:0:-1])
    rev_bounds = tuple(rev_bounds)

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    # Times encoded as the last six characters of each reflectivity file name.
    refl_times = np.array([ int(f[-6:]) for f in glob.glob("%s??????" % refl_base) ])
    refl_keep_times = []
    refl_data = {}

    for tt in _target_times:
        # Choose the closest reflectivity time, preferring the earlier one on ties/overshoot.
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1

        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[refl_times[idx]] = hdf.variables['refl2d'][rev_bounds]

    _proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds[1:])
#   _proj['resolution'] = 'h'
    map = Basemap(**_proj)  # NOTE(review): shadows the builtin `map`

    # NOTE(review): these two loads are unused; loadObs below re-reads the files.
    ttu_sticknet_obs = cPickle.load(open("ttu_sticknet.pkl", 'r'))
    psu_straka_obs = cPickle.load(open("psu_straka_mesonet.pkl", 'r'))

    all_obs = loadObs(['ttu_sticknet.pkl', 'psu_straka_mesonet.pkl'],
        [ _epoch_time + timedelta(seconds=(_initial_time + t)) for t in _target_times ],
        map, (goshen_1km_proj['width'], goshen_1km_proj['height']), round_time=True)
    print all_obs

#   partitioned_obs = gatherObservations(all_obs, [ _initial_time + t for t in _target_times ])
    for time, refl_time in zip([ _initial_time + t for t in _target_times], refl_keep_times):
        time_str = (_epoch_time + timedelta(seconds=time)).strftime("%d %B %Y %H%M UTC")

        plot_obs = all_obs[np.where(all_obs['time'] == time)]
        # NOTE(review): inflow/outflow partition is computed but not used below.
        inflow_idxs = np.where((plot_obs['wind_dir'] >= inflow_wd_lbound) & (plot_obs['wind_dir'] <= inflow_wd_ubound))[0]
        outflow_idxs = np.array([ idx for idx in range(plot_obs['id'].shape[0]) if idx not in inflow_idxs ])

        title = "All MM observations at %s" % time_str
        file_name = "mm_obs_%06d.png" % (time - _initial_time)
        plotObservations(plot_obs, map, title, file_name, refl=refl_data[refl_time])
    return
def main():
    """Plot gridded radar fields (Z, Vr, height, range) for each QC'd radar file.

    One four-panel image per input file and tilt is written as
    <radar_id>_enkf_rad_<time>.png.
    """
#   files = glob.glob("qc/manual/3km/KCYS.20090605.*")
    files = glob.glob("qc/1km/KCYS.20090605.*")
#   erf_88D = RadarObsFile("qc/manual/1km/KCYS.20090605.215744")

#   domain_bounds = (slice(90, 160), slice(80, 150))
    domain_bounds = (slice(None), slice(None))
    radar_location = (41.56150, -104.298996)  # (lat, lon)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=tuple(reversed(domain_bounds)))
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`
    radar_x, radar_y = map(*reversed(radar_location))  # Basemap call takes (lon, lat)

    gs_x, gs_y = goshen_1km_gs

    # (colormap, vmin, vmax) per panel, aligned with plot_titles and plot_data.
    plot_cmaps = [ (matplotlib.cm.jet, 10, 80), (matplotlib.cm.RdBu, -40, 40), (matplotlib.cm.RdYlGn, 0, 10000), (matplotlib.cm.RdYlGn, 0, 40000), ]
    plot_titles = [ "Reflectivity (dBZ)", "Radial Velocity (m s$^{-1}$)", "Height (m)", "Range (m)", ]

    for file in files:  # NOTE(review): `file` shadows the Py2 builtin
        print file
        erf = RadarObsFile(file)
        xs, ys = np.meshgrid(gs_x * np.arange(erf._n_gridx), gs_y * np.arange(erf._n_gridy))

        # Subset to the plotting window and re-anchor coordinates at the origin.
        xs = xs[domain_bounds]
        xs -= xs[0, 0]
        ys = ys[domain_bounds]
        ys -= ys[0, 0]

        plot_data = [ erf['Z'][(slice(None), ) + domain_bounds], erf['vr'][(slice(None), ) + domain_bounds], erf.heights[(slice(None), ) + domain_bounds], erf.range[(slice(None), ) + domain_bounds], ]

        # Radar id and time are parsed from the file name (e.g. .../KCYS.20090605.HHMMSS).
        file_name_start = file.rfind("/") + 1
        radar_id = file[file_name_start:(file_name_start + 4)]
        time = file[-6:]

        for ntlt in [ 0 ]:  # range(erf._n_tilts): only the lowest tilt for now
            plotRadTilt([ p[ntlt] for p in plot_data ], plot_cmaps, plot_titles, (xs, ys, gs_x, gs_y, map), "%s_enkf_rad_%s.png" % (radar_id, time), base_ref=plot_data[0][0])
    return
def main():
    """Plot ensemble-mean reflectivity, vertical velocity, and wind at z = 1000 m AGL.

    NOTE(review): token-identical duplicate of an earlier ``main`` in this
    file; if both live in one module, the later definition silently replaces
    the earlier one at import time.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    fcst_files = glob.glob("/caps1/tsupinie/1km-control-%s/ena???.hdf0*" % args.exp_name)

    # Ensemble-mean reflectivity: the aggregator collapses the member axis.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(
        fcst_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity,
        "/caps1/tsupinie/1km-control-%s/ena001.hdfgrdbas" % args.exp_name,
        {'z': 1000}, agl=True, aggregator=lambda x: np.mean(x, axis=0))

    ens_winds, ens_members, ens_times = loadAndInterpolateEnsemble(
        fcst_files, ['u', 'v', 'w'], getWind,
        "/caps1/tsupinie/1km-control-%s/ena001.hdfgrdbas" % args.exp_name,
        {'z': 1000}, agl=True)

    u_mean = ens_winds['u'].mean(axis=0)
    v_mean = ens_winds['v'].mean(axis=0)
    w_mean = ens_winds['w'].mean(axis=0)

    # First forecast (post-assimilation) time index; assumes 14400 s is present.
    fcst_start_idx = np.where(ens_times == 14400)[0][0]

    for wdt, t_ens in enumerate(ens_times):
        if wdt < fcst_start_idx:
            # Assimilation period: no flux box overlay.
            makeplot(
                refl_ens_mean[wdt], (u_mean[wdt], v_mean[wdt]), w_mean[wdt], 5,
                map, goshen_1km_gs,
                r"Ensemble Mean Reflectivity, $w$, and Wind at $z$ = 1000 m and $t$ = %d s" % t_ens,
                "images-%s/ens_mean_%06d.png" % (args.exp_name, t_ens))
        else:
            # Forecast period: overlay the experiment's flux box for this time.
            makeplot(
                refl_ens_mean[wdt], (u_mean[wdt], v_mean[wdt]), w_mean[wdt], 5,
                map, goshen_1km_gs,
                r"Ensemble Mean Reflectivity, $w$, and Wind at $z$ = 1000 m and $t$ = %d s" % t_ens,
                "images-%s/ens_mean_%06d.png" % (args.exp_name, t_ens),
                box=flux_boxes[args.exp_name][wdt - fcst_start_idx])
    return
def computeUncertainty(var_order, region_order):
    """Print a LaTeX table of observation uncertainty by variable and region.

    Rows are T, Td, u, and v; the first column is the uncertainty over all
    observations, followed by one column per region in ``region_order``.
    Returns the table values as a 2-D numpy array.

    NOTE(review): ``var_order`` is accepted but never used.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times = np.arange(14700, 18300, 300)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    obs_file_names = [ 'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl' ]
    obs = loadObs(obs_file_names, [base_time + timedelta(seconds=int(t)) for t in times], map,
        (goshen_1km_proj['width'], goshen_1km_proj['height']),
        sounding_obs=['soundings_clip.pkl'])
    obs_part = partitionObs(obs, base_epoch)

    u, v = windDirSpd2UV(obs['wind_dir'], obs['wind_spd'])
    u_part, v_part = {}, {}
    for region, reg_obs in obs_part.iteritems():
        u_part[region], v_part[region] = windDirSpd2UV(obs_part[region]['wind_dir'], obs_part[region]['wind_spd'])

    all_uncert = []

    def row(all_obs, part_obs, name, units):
        # Build and print one LaTeX table row; return its numeric values.
        uncert = uncertainty(all_obs)
        row_uncert = [uncert]
        # `row` here is a local string; it does not clobber this function's name.
        row = "\t" + r"%s (%s) & %.2f" % (name, units, uncert)
        for region in region_order:
            uncert = uncertainty(part_obs[region])
            row += " & %.2f" % uncert
            row_uncert.append(uncert)
        print row + r"\\"
        print "\t" + r"\hline"
        return np.array(row_uncert)

    all_uncert.append(row(obs['temp'], dict([(key, val['temp']) for key, val in obs_part.iteritems()]), '$T$', r'$^{\circ}$F'))
    all_uncert.append(row(obs['dewp'], dict([(key, val['dewp']) for key, val in obs_part.iteritems()]), '$T_d$', r'$^{\circ}$F'))
    all_uncert.append(row(u, u_part, '$u$', r'm s$^{-1}$'))
    all_uncert.append(row(v, v_part, '$v$', r'm s$^{-1}$'))
    return np.array(all_uncert)
def main():
    """Plot composite images of surface and sounding observations for one domain.

    ``domain`` is hard-coded ("3km" or "1km"); it selects the projection, the
    radar set, the marker scale, and whether profiler observations are added.
    """
    domain = "3km"
    assim_obs = cPickle.load(open("assim_obs_%s.pkl" % domain, 'r'))
    sounding_obs = cPickle.load(open("sounding_obs_da_%s.pkl" % domain, 'r'))

    # Radar (lat, lon) locations.
    radar_location_KCYS = (41.15194, -104.80611)
    radar_location_KFTG = (39.78667, -104.54583)
    radar_location_KRIW = (43.06611, -108.47722)
    radar_location_05XP = (41.56150, -104.298996)

    if domain == "3km":
        time_span = "1830-2200 UTC"
        goshen_proj = goshen_3km_proj
        goshen_gs = goshen_3km_gs
        # (location, range in meters) per radar.
        radars = [(radar_location_KCYS, 230000), (radar_location_KFTG, 230000), (radar_location_KRIW, 230000)]
        profiler_obs = cPickle.load(open("profile_obs_da.pkl", 'r'))
        scale = 75
    elif domain == "1km":
        time_span = "2100-2200 UTC"
        goshen_proj = goshen_1km_proj
        goshen_gs = goshen_1km_gs
        radars = [(radar_location_KCYS, 230000), (radar_location_KFTG, 230000), (radar_location_05XP, 40000)]
        # No profiler data on the 1 km domain; use an empty record array.
        profiler_obs = np.empty((0, ), dtype=sounding_obs.dtype)
        scale = 25

    sounding_obs = np.append(sounding_obs, profiler_obs)

    proj = setupMapProjection(goshen_proj, goshen_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

#   assim_obs['sndg'] = sounding_obs

    plotObservationsComposite(np.concatenate(tuple(assim_obs.values())), map, scale, "Surface Observation Composite (%s)" % time_span, "mm_composite_%s.png" % domain)
    plotSoundingObservationsComposite(sounding_obs, map, scale, (goshen_proj['width'], goshen_proj['height']), "Sounding Observation Composite (%s)" % time_span, "sndg_composite_%s.png" % domain, radars=radars)
    return
def main():
    """Plot vorticity and winds at z = 2000 m for each member/time of one run.

    NOTE(review): ``exp_order`` is indexed by the member index ``lde``, so it
    assumes exactly as many members as entries here — verify against the data.
    """
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`
    exp_order = [ 'no-mm', 'mm', 'mod-05XP' ]

    base_path = "/caps1/tsupinie/1km-control-emeanf/"
    files = glob.glob("%s/ena???.hdf0*" % base_path)
    vort_all, ens_members, times = loadAndInterpolateEnsemble(files, [ 'u', 'v', 'dx', 'dy' ], vorticityWinds, "%s/ena001.hdfgrdbas" % base_path, { 'z':2000 })
    print vort_all.shape

    for lde, ens in enumerate(ens_members):
        for wdt, time in enumerate(times):
            plotVorticity(vort_all['vort'][lde, wdt], (vort_all['u'][lde, wdt], vort_all['v'][lde, wdt]), map, "Vorticity", "det_vort_%s_%06d.png" % (exp_order[lde], time))
    return
def main():
    """Plot vorticity and winds at z = 2000 m for each member/time of one run.

    NOTE(review): token-identical duplicate of the previous ``main`` in this
    file; if both live in one module, the later definition wins at import time.
    """
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`
    exp_order = ['no-mm', 'mm', 'mod-05XP']

    base_path = "/caps1/tsupinie/1km-control-emeanf/"
    files = glob.glob("%s/ena???.hdf0*" % base_path)
    vort_all, ens_members, times = loadAndInterpolateEnsemble(
        files, ['u', 'v', 'dx', 'dy'], vorticityWinds,
        "%s/ena001.hdfgrdbas" % base_path, {'z': 2000})
    print vort_all.shape

    for lde, ens in enumerate(ens_members):
        for wdt, time in enumerate(times):
            plotVorticity(vort_all['vort'][lde, wdt],
                          (vort_all['u'][lde, wdt], vort_all['v'][lde, wdt]),
                          map, "Vorticity",
                          "det_vort_%s_%06d.png" % (exp_order[lde], time))
    return
def computeUncertainty(var_order, region_order):
    """Print a LaTeX table of observation uncertainty by variable and region.

    NOTE(review): token-identical duplicate of the earlier
    ``computeUncertainty`` in this file; the later definition wins at import
    time if both are in one module.  ``var_order`` is accepted but unused.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times = np.arange(14700, 18300, 300)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    obs = loadObs(obs_file_names, [ base_time + timedelta(seconds=int(t)) for t in times ], map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    obs_part = partitionObs(obs, base_epoch)

    u, v = windDirSpd2UV(obs['wind_dir'], obs['wind_spd'])
    u_part, v_part = {}, {}
    for region, reg_obs in obs_part.iteritems():
        u_part[region], v_part[region] = windDirSpd2UV(obs_part[region]['wind_dir'], obs_part[region]['wind_spd'])

    all_uncert = []

    def row(all_obs, part_obs, name, units):
        # Build and print one LaTeX table row; return its numeric values.
        uncert = uncertainty(all_obs)
        row_uncert = [ uncert ]
        # `row` here is a local string; it does not clobber this function's name.
        row = "\t" + r"%s (%s) & %.2f" % (name, units, uncert)
        for region in region_order:
            uncert = uncertainty(part_obs[region])
            row += " & %.2f" % uncert
            row_uncert.append(uncert)
        print row + r"\\"
        print "\t" + r"\hline"
        return np.array(row_uncert)

    all_uncert.append(row(obs['temp'], dict([ (key, val['temp'] ) for key, val in obs_part.iteritems()]), '$T$', r'$^{\circ}$F'))
    all_uncert.append(row(obs['dewp'], dict([ (key, val['dewp'] ) for key, val in obs_part.iteritems()]), '$T_d$', r'$^{\circ}$F'))
    all_uncert.append(row(u, u_part, '$u$', r'm s$^{-1}$'))
    all_uncert.append(row(v, v_part, '$v$', r'm s$^{-1}$'))
    return np.array(all_uncert)
def main():
    """Plot composite images of surface and sounding observations for one domain.

    NOTE(review): token-identical duplicate of an earlier ``main`` in this
    file; the later definition wins at import time if both share a module.
    """
    domain = "3km"
    assim_obs = cPickle.load(open("assim_obs_%s.pkl" % domain, 'r'))
    sounding_obs = cPickle.load(open("sounding_obs_da_%s.pkl" % domain, 'r'))

    # Radar (lat, lon) locations.
    radar_location_KCYS = (41.15194, -104.80611)
    radar_location_KFTG = (39.78667, -104.54583)
    radar_location_KRIW = (43.06611, -108.47722)
    radar_location_05XP = (41.56150, -104.298996)

    if domain == "3km":
        time_span = "1830-2200 UTC"
        goshen_proj = goshen_3km_proj
        goshen_gs = goshen_3km_gs
        # (location, range in meters) per radar.
        radars = [ (radar_location_KCYS, 230000), (radar_location_KFTG, 230000), (radar_location_KRIW, 230000) ]
        profiler_obs = cPickle.load(open("profile_obs_da.pkl", 'r'))
        scale = 75
    elif domain == "1km":
        time_span = "2100-2200 UTC"
        goshen_proj = goshen_1km_proj
        goshen_gs = goshen_1km_gs
        radars = [ (radar_location_KCYS, 230000), (radar_location_KFTG, 230000), (radar_location_05XP, 40000) ]
        # No profiler data on the 1 km domain; use an empty record array.
        profiler_obs = np.empty((0,), dtype=sounding_obs.dtype)
        scale = 25

    sounding_obs = np.append(sounding_obs, profiler_obs)

    proj = setupMapProjection(goshen_proj, goshen_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

#   assim_obs['sndg'] = sounding_obs

    plotObservationsComposite(np.concatenate(tuple(assim_obs.values())), map, scale, "Surface Observation Composite (%s)" % time_span, "mm_composite_%s.png" % domain)
    plotSoundingObservationsComposite(sounding_obs, map, scale, (goshen_proj['width'], goshen_proj['height']), "Sounding Observation Composite (%s)" % time_span, "sndg_composite_%s.png" % domain, radars=radars)
    return
def main():
    """Plot the swath of vortex-center tracks for one experiment and height.

    Loads precomputed vortex centers, overlays the observed tornado track in
    red, one black line per tracked vortex, and state/county outlines; saves
    to vortex_center_swath_<exp>-<height>m.png.
    """
    exp_name = "mm"
    height = 1000  # interpolation height (m) used in the pickle file name
    n_ens_members = 40  # NOTE(review): unused with reorganizeCenters; kept for the "Dumb" variant
    times = np.arange(14400, 18300, 300)

    centers = cPickle.load(open("vortex_centers_%s-%dm.pkl" % (exp_name, height), 'r'))
    lines_x, lines_y = reorganizeCenters(centers, times)  # , n_ens_members, times)
#   lines_x, lines_y = reorganizeCentersDumb(centers, n_ens_members, times)

    bounds = (slice(100, 180), slice(90, 170))
    # NOTE(review): bounds is passed positionally here, while other callers in
    # this file pass bounds= as a keyword with the slices reversed — confirm
    # the expected axis order against setupMapProjection.
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    gs_x, gs_y = goshen_1km_gs
    # Physical offset of the plotting window's lower-left corner.
    lb_x = bounds[0].start * gs_x
    lb_y = bounds[1].start * gs_y

    # Observed tornado track endpoints given as (lat, lon); Basemap wants (lon, lat).
    tornado_track_x, tornado_track_y = map(*zip(*((41.63, -104.383), (41.6134, -104.224)))[::-1])
    pylab.plot(tornado_track_x, tornado_track_y, 'r-')

    for line_x, line_y in zip(lines_x, lines_y):
        pylab.plot(line_x - lb_x, line_y - lb_y, 'ko-', markersize=2, linewidth=1)

    map.drawstates(linewidth=1.5)
    map.drawcountries(linewidth=1.5)
    map.drawcoastlines(linewidth=1.5)
    map.readshapefile("countyp020", 'counties', linewidth=0.5)

    pylab.savefig("vortex_center_swath_%s-%dm.png" % (exp_name, height))
    return
def main():
    """Verify ensemble forecasts against surface/sounding observations.

    For each assimilation-window time, interpolates every ensemble member to
    the observation locations, then accumulates CRPS, rank histograms, and
    outlier counts per variable (u, v, T, Td) and per region (inflow,
    outflow, sounding).  Results are pickled and plotted.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]
    n_ensemble_members = 40
    exp_name = "zupdtpt"

    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)  # NOTE(review): shadows the builtin `map`

    #
    # Load and thin all the observed data
    #
    obs_file_names = [ 'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl' ]
    all_obs = loadObs(obs_file_names, times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]

    # Observation networks are distinguished by the first character of the station id.
    ob_first_char = np.array([id[0] for id in list(all_obs['id'])])

    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(np.where((ob_first_char == "1") | (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs

    all_times = [ datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t) for t in all_obs['time'] ]

    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    obs_z = all_obs['pres'] * 100  # presumably hPa -> Pa for coords='pres' — TODO confirm

    def getObsData(**kwargs):
        # Pack the interpolated model fields into one record array per point.
        obs = np.empty(kwargs['pt'].shape, dtype=[('u', np.float32), ('v', np.float32), ('pt', np.float32), ('p', np.float32), ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']
        return obs

    obs_vars = ['u', 'v', 't', 'td']
    # ens_funcs convert model fields to each verified variable; obs_funcs do
    # the equivalent conversion on the observation side.
    ens_funcs = {'u': uFromU, 'v': vFromV, 't': tempFromPt, 'td': dewpFromQv}
    obs_funcs = { 'u': uFromWind, 'v': vFromWind, 't': tempFromT, 'td': dewpFromTd }

    avg_crps_values = {}
    all_crps_values = {}
    rank_histograms = {}
    all_alphas = {}
    all_betas = {}
    high_outliers = {}
    low_outliers = {}

    for time_sec, time in zip(times_seconds, times):
#       files = glob.glob("/caps2/tsupinie/1kmf-%s/ena???.hdf%06d" % (exp_name, time_sec))
        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())

        #
        # Load all the ensemble members and interpolate them to the observation points.  Because of the design of my script, I'm
        # loading the all the members timestep-by-timestep, but there's no reason you can't load them all at once.  See the function
        # definition for the meaning of all the arguments.
        #
#       ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps2/tsupinie/1kmf-%s/ena001.hdfgrdbas" % exp_name,
#           {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True, coords='pres')

        ens_obs = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, [time_sec], (['u', 'v', 'pt', 'p', 'qv'], getObsData), { 'z': obs_z[time_idxs], 'y': obs_y[time_idxs], 'x': obs_x[time_idxs] }, agl=False, wrap=True, coords='pres')

#       print ens_obs

        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []
            ens_ob_var = ens_funcs[ob_var](**dict([(n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names]))
            obs = obs_funcs[ob_var](**dict([(n, all_obs[n][time_idxs]) for n in all_obs.dtype.names]))

            # Lazily initialize per-variable, per-region accumulators on first sight.
            if ob_var not in rank_histograms:
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}

                for region in ['inflow', 'outflow', 'sounding']:
                    # One bin per possible rank: n_members + 1.
                    rank_histograms[ob_var][region] = np.zeros((ens_obs.shape[0] + 1, ), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []

            for idx in xrange(obs.shape[-1]):
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])

                for region in ['inflow', 'outflow', 'sounding']:
                    # Surface obs are binned by the inflow/outflow station lists
                    # for this time; soundings are binned by obtype.
                    if region in inflow_stations[time_sec] and all_obs['id'][time_idxs][idx] in inflow_stations[time_sec][region]:
#                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Surface %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sfc_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)
                    elif region == "sounding" and all_obs['obtype'][time_idxs][idx] == "SNDG":
#                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Sounding %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sndg_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                time_crps_values.append(crps)

            # Domain-average CRPS for this variable and time.
            try:
                avg_crps_values[ob_var].append(sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [ sum(time_crps_values) / len(time_crps_values) ]

    def dictmean(D):
        # Mean over all entries of all regions (flattens the per-region lists).
        all_lists = []
        for val in D.itervalues():
            all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Element-wise sum across regions (e.g. combined rank histogram).
        all_lists = []
        for val in D.itervalues():
            all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        # NOTE(review): shadows any module-level `mean`; local helper only.
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        os.mkdir("images-%s" % exp_name, 0755)  # Py2-only octal literal

    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values, open("%s_crps_breakdown.pkl" % exp_name, 'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers), open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)

    for ob_var in obs_vars:
        total_obs = sum([len(v) for v in high_outliers[ob_var].itervalues()])
        print total_obs
        # Combined-region graphs, then one set of graphs per region.
        createVerificationGraphs(dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]), dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]), dictsum(rank_histograms[ob_var]).astype(float) / total_obs, total_obs, "%s" % ob_var, exp_name)
        for region in ['inflow', 'outflow', 'sounding']:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(mean(all_alphas[ob_var][region]), mean(all_betas[ob_var][region]), mean(high_outliers[ob_var][region]), mean(low_outliers[ob_var][region]), rank_histograms[ob_var][region].astype(float) / region_obs, region_obs, suffix, exp_name)

        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])
        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
def main(): ap = argparse.ArgumentParser() ap.add_argument('--exp', dest='exp_name', required=True) ap.add_argument('--threshold', dest='threshold', type=int, default=20) args = ap.parse_args() bounds = (slice(100, 180), slice(90, 170)) radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111 proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs) threshold = args.threshold exp_name = args.exp_name img_dir = "images-%s/ets_%ddBZ" % (exp_name, threshold) map = Basemap(**proj) radar_x, radar_y = map(radar_lon, radar_lat) obs_base = "hdf/KCYS/1km/goshen.hdfrefl2d" obs_times = np.array([ int(f[-6:]) for f in glob.glob("%s*" % obs_base) ]) fcst_files = glob.glob("/caps1/tsupinie/1km-control-%s/ena???.hdf014[47]00" % exp_name) fcst_files.extend(glob.glob("/caps1/tsupinie/1km-control-%s/ena???.hdf01[5678]?00" % exp_name)) ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(fcst_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", {'z_base':radar_elev, 'y_base':radar_y, 'x_base':radar_x, 'elev_angle':0.5}, agl=False, wrap=True)#, aggregator=lambda x: np.mean(x, axis=0)) # ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(fcst_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", # {'z_base':radar_elev, 'y_base':radar_y, 'x_base':radar_x, 'elev_angle':0.5}, agl=False, wrap=True) # ens_refl_mean = ens_refl.mean(axis=0) refl_ens_mean = probMatchMean(ens_refl) bounds_rev = [ slice(None), slice(None) ] bounds_rev.extend(bounds[::-1]) bounds_rev = tuple(bounds_rev) # refl_ens_mean = refl_ens_mean[bounds_rev[1:]] # ens_refl = ens_refl[bounds_rev] all_ets = np.empty((len(ens_members), len(ens_times)), dtype=np.float32) all_ets_mean = np.empty((len(ens_times), ), dtype=np.float32) all_confusion = np.empty(ens_refl.shape, dtype=np.int32) all_confusion_mean = np.empty(refl_ens_mean.shape, dtype=np.int32) for wdt, time in 
enumerate(ens_times): idx = np.argmin(np.abs(obs_times - time)) if obs_times[idx] > time and idx > 0: idx -= 1 bounds_obs = [0] bounds_obs.extend(bounds[::-1]) bounds_obs = tuple(bounds_obs) obs_file_name = "%s%06d" % (obs_base, obs_times[idx]) obs_hdf = nio.open_file(obs_file_name, mode='r', format='hdf') obs_refl = obs_hdf.variables['refl2d'][0] #[bounds_obs] all_ets_mean[wdt], all_confusion_mean[wdt] = ETS(refl_ens_mean[wdt], obs_refl, threshold) gs_x, gs_y = goshen_1km_gs for lde, member in enumerate(ens_members): all_ets[lde, wdt], all_confusion[lde, wdt] = ETS(ens_refl[lde, wdt], obs_refl, threshold) # nx, ny = ens_refl[lde, wdt].shape # xs, ys = np.meshgrid( gs_x * np.arange(nx), gs_y * np.arange(ny) ) # pylab.clf() # pylab.contourf(xs, ys, ens_refl[lde, wdt], levels=np.arange(10, 80, 10)) # pylab.colorbar() # pylab.savefig("sweep_interp_%s_%06d.png" % (member, time)) nx, ny = refl_ens_mean[wdt].shape xs, ys = np.meshgrid( gs_x * np.arange(nx), gs_y * np.arange(ny) ) pylab.clf() pylab.contourf(xs, ys, refl_ens_mean[wdt], levels=np.arange(10, 80, 10)) pylab.colorbar() pylab.savefig("%s/sweep_interp_mean_%06d.png" % (img_dir, time)) cPickle.dump(all_ets_mean, open("%s_%ddBZ.pkl" % (exp_name, threshold), 'w'), -1) time_mean_ets = all_ets.mean(axis=1) sort_mean_idxs = np.argsort(time_mean_ets) pylab.clf() for lde, member in enumerate(ens_members): print sort_mean_idxs[lde] + 1, time_mean_ets[sort_mean_idxs[lde]] pylab.plot(ens_times, all_ets[lde], 'r-', lw=0.75) pylab.plot(ens_times, all_ets_mean, 'k-', lw=1.5) y_lb, y_ub = pylab.ylim() pylab.plot([14400, 14400], [y_lb, y_ub], 'k--', lw=0.5) pylab.ylim([y_lb, y_ub]) pylab.xlim([10800, 18000]) pylab.xlabel("Time (s)") pylab.ylabel("ETS") pylab.savefig("%s/ets_swath_mm.png" % img_dir) pylab.close() for wdt, time in enumerate(ens_times): fudge = 16 if threshold == 20: fudge = 32 plotConfusion(all_confusion_mean[wdt], map, goshen_1km_gs, "Confusion for Reflectivity of the Ensemble Mean at time %06d" % time, 
"%s/confusion_mean_%06d.png" % (img_dir, time), inset=flux_boxes[exp_name][wdt], fudge=fudge) # for lde, member in enumerate(ens_members): # plotConfusion(all_confusion[lde, wdt], map, goshen_1km_gs, "Confusion for Reflectivity of Member %s at time %06d" % (member, time), "%s/confusion_ena%s_zoom_%06d.png" % (img_dir, member, time)) gc.collect() return
def main():
    """Plot time-height swaths of ensemble state vs. each radiosonde.

    For each sounding id, accumulates the ensemble fields interpolated to the
    sonde's observation points across all times, then plots ensemble members
    (thin colored lines) against the observed temperature, dewpoint, and
    pressure (thick black lines) and saves sonde_swath_<id>.png.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [ base_time + timedelta(seconds=t) for t in times_seconds ]

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    sounding_obs = loadObs(['soundings.pkl'], times, map, sounding_obs=['soundings.pkl'])

    obs_x, obs_y = map(sounding_obs['longitude'], sounding_obs['latitude'])
    obs_z = sounding_obs['elevation']

    # Earliest 300-s bin containing a sounding ob, as seconds after base_time.
    start_time = floor(sounding_obs['time'].min() / 300) * 300 - (base_time - epoch).total_seconds()

    # Fix: np.unique1d was deprecated in NumPy 1.4 and removed in 1.9;
    # np.unique returns the same sorted unique values.
    sonde_ids = np.unique(sounding_obs['id'])

    sondes = {}
    for id in sonde_ids:
        sondes[id] = {'obs':[], 'ens':[] }

    for time in times_seconds[times_seconds.index(start_time):]:
        time_epoch = time + (base_time - epoch).total_seconds()
        files = glob.glob("/caps1/tsupinie/1km-control-20120712/ena???.hdf%06d" % time)

        # Obs whose (rounded) time falls in this 300-s bin.
        round_times = np.round(sounding_obs['time'] / 300) * 300
        time_idxs = np.where(round_times == time_epoch)

        # NOTE(review): getObsData is not defined in this function — assumed
        # to be a module-level helper; verify it is in scope.
        ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
            {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True)

        # Reorder to (point, member, time) so points can be split per sonde.
        ens_obs = np.transpose(ens_obs, axes=(2, 0, 1))

        for sonde in sonde_ids:
            sonde_idxs = np.where(sounding_obs['id'][time_idxs] == sonde)
            sondes[sonde]['obs'].extend(sounding_obs[time_idxs[0][sonde_idxs]])
            sondes[sonde]['ens'].extend([ e[:,0] for e in ens_obs[sonde_idxs] ])

    for sonde in sonde_ids:
        ens_obs = np.array(sondes[sonde]['ens'], dtype=sondes[sonde]['ens'][0].dtype)
        ens_temp = theta2Temperature(pt=ens_obs['pt'], p=ens_obs['p'])
        ens_dewp = qv2Dewpoint(qv=ens_obs['qv'], p=ens_obs['p'])

        data_obs = np.array(sondes[sonde]['obs'], dtype=sondes[sonde]['obs'][0].dtype)
        order = np.argsort(data_obs['time'])
        time = data_obs['time'][order] - (base_time - epoch).total_seconds()
        obs_temp = data_obs['temp'][order] + 273.15  # presumably C -> K — TODO confirm
        obs_dewp = data_obs['dewp'][order] + 273.15

#       pylab.figure(figsize=(8, 10), dpi=100)
#       pylab.axes((0, 0, 1, 1))
        pylab.figure()
        for ens in xrange(ens_obs.shape[1]):
#           plotSounding(None, t=ens_temp[:, ens][order], td=ens_dewp[:, ens][order], p=ens_obs['p'][:, ens][order] / 100., u=ens_obs['u'][:, ens][order], v=ens_obs['v'][:, ens][order])
            pylab.subplot(211)
            pylab.plot(time, ens_temp[:, ens][order], 'r-', linewidth=0.5)
            pylab.plot(time, ens_dewp[:, ens][order], 'g-', linewidth=0.5)
            pylab.subplot(212)
            pylab.plot(time, ens_obs['p'][:, ens][order] / 100., 'b-', linewidth=0.5)

#       plotSounding(None, t=obs_temp, td=obs_dewp, p=data_obs['pres'][order], u=np.ones(order.shape), v=np.zeros(order.shape))
        pylab.subplot(211)
        pylab.plot(time, obs_temp, 'k-', linewidth=1.0)
        pylab.plot(time, obs_dewp, 'k-', linewidth=1.0)
        pylab.subplot(212)
        pylab.plot(time, data_obs['pres'][order], 'k-', linewidth=1.0)

        sonde_name = sonde.replace('/', '_')
        pylab.savefig("sonde_swath_%s.png" % sonde_name)
        pylab.close()
    return
def main(): ap = argparse.ArgumentParser() ap.add_argument('--parameter', dest='parameter', default='vort') ap.add_argument('--height', dest='interp_height', type=float, default=75.) ap.add_argument('--tag', dest='tag', required=True) ap.add_argument('--min-prob', dest='min_prob', type=float, default=0.1) ap.add_argument('--bbox', dest='bbox', action='store_true', default=False) args = ap.parse_args() exp_names = { 'prtrgn=1':"Storm Perturbations Only", 'newbc':"BC: $r_{0h}$ = 12 km", 'r0h=4km':'$r_{0h}$ = 4km', 'snd-no-w':'Sndgs do not update $w$', 'control':'Control ($r_{0h}$ = 6 km, Sndgs update $w$)', 'zupdtpt':'Control', 'z-no-05XP':"No MWR05XP Data", 'z-no-mm-05XP':"No MM or MWR05XP Data", 'z-no-mm':"No MM Data", 'z-no-v2':"No VORTEX2 data", 'z-no-snd':"No Sounding Data", 'bc7dBZ,5ms':r"$r_{0h}$ = 4 km, BC: $\sigma_Z$ = 7 dBZ, $\sigma_{v_r}$ = 5 m s$^{-1}$", 'bcmult=1.03':"BC: Mult. Inflation Factor = 1.03", 'r0h=6km-bc7dBZ,5ms':r"BC: $\sigma_Z$ = 7 dBZ, $\sigma_{v_r}$ = 5 m s$^{-1}$", '5dBZ,3ms-bc7dBZ,5ms':r"$\sigma_Z$ = 5 dBZ, $\sigma_{v_r}$ = 3 m s$^{-1}$", 'ebr':"Modified: Error, BC, and $r_{0h}$", 'no-mm': 'No MM', 'mm':'MM', '05XP':'MM + MWR05XP', 'outer':"Outer Domain" } param = args.parameter interp_height = args.interp_height tag = args.tag if param == 'vort': description = "$\zeta$ > 0.0075 s$^{-1}$" domain_bounds = (slice(90, 160), slice(90, 160)) max_refl_all, max_refl_da, max_refl_fcst = None, None, None try: vortex_centers = None vortex_centers = cPickle.load(open("vortex_centers_%s-%dm.pkl" % (tag, int(interp_height)), 'r')) except IOError: print "Can't find vortex center file for %s, %d m." 
% (tag, int(interp_height)) vortex_centers = None elif param == 'refl': threshold = 40 description = "$Z$ > %d dBZ" % threshold domain_bounds = (slice(None), slice(None)) # max_refl_all, max_refl_da, max_refl_fcst = cPickle.load(open("max_obs_refl.pkl", 'r')) # max_refl_all = np.where(max_refl_all >= threshold, np.ones(max_refl_all.shape ), np.zeros(max_refl_all.shape )) # max_refl_da = np.where(max_refl_da >= threshold, np.ones(max_refl_da.shape ), np.zeros(max_refl_da.shape )) # max_refl_fcst = np.where(max_refl_fcst >= threshold, np.ones(max_refl_fcst.shape), np.zeros(max_refl_fcst.shape)) max_refl_all, max_refl_da, max_refl_fcst = None, None, None vortex_centers = None elif param == 'w': threshold = 5. description = "$w$ > %d m s$^{-1}$" % threshold domain_bounds = (slice(None), slice(None)) max_refl_all, max_refl_da, max_refl_fcst = None, None, None vortex_centers = None grid_spacing = goshen_1km_gs proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, domain_bounds[::-1]) map = Basemap(**proj) max_prob = cPickle.load(open("max_%s_prob_%dm_%s.pkl" % (param, interp_height, tag), 'r')) argmax_prob = cPickle.load(open("argmax_%s_prob_%dm_%s.pkl" % (param, interp_height, tag), 'r')) tornado_track = zip(*((41.63,-104.383), (41.6134,-104.224))) if len(max_prob) >= 3: if param == 'w': max_prob_all, max_prob_da, max_prob_fcst, objects = max_prob else: max_prob_all, max_prob_da, max_prob_fcst = max_prob objects = None argmax_prob_all, argmax_prob_da, argmax_prob_fcst = argmax_prob else: if param == 'w': max_prob_all, objects = max_prob else: max_prob_all = max_prob[0] objects = None argmax_prob_all = argmax_prob[0] if param == 'w': bounding_box = pickBox(map(*reversed(tornado_track)), objects) cPickle.dump(bounding_box, open("bbox_%dm_%s.pkl" % (interp_height, tag), 'w'), -1) if args.bbox: bbox_buffer = 10 bbox_offsets = [0, 10, 0] # t, y, x bbox = cPickle.load(open("bbox_2000m_%s.pkl" % tag, 'r')) objects = [ tuple(slice(b.start - bbox_buffer + o, b.stop + 
bbox_buffer + o) for b, o in zip(bbox, offsets)) ] start = datetime.utcnow() title = r"Probability of %s (%s at $z$ = %d m)" % (description, exp_names[tag], interp_height) lower_bounds = tuple([ b.start * g if b.start else 0 for b, g in zip(domain_bounds, grid_spacing) ]) plotProbability(max_prob_all[domain_bounds], map, lower_bounds, grid_spacing, tornado_track, title, "max_%s_prob_%dm_%s_all.png" % (param, interp_height, tag), obs=max_refl_all, centers=vortex_centers, min_prob=args.min_prob, objects=objects) plotProbability(max_prob_da[domain_bounds], map, lower_bounds, grid_spacing, tornado_track, title, "max_%s_prob_%dm_%s_da.png" % (param, interp_height, tag), obs=max_refl_da, centers=vortex_centers, min_prob=args.min_prob) plotProbability(max_prob_fcst[domain_bounds], map, lower_bounds, grid_spacing, tornado_track, title, "max_%s_prob_%dm_%s_fcst.png" % (param, interp_height, tag), obs=max_refl_fcst, centers=vortex_centers, min_prob=args.min_prob) title = r"Earliest Timing of %s (%s at $z$ = %d m)" % (description, exp_names[tag], interp_height) plotTiming(max_prob_all[domain_bounds], argmax_prob_all[domain_bounds], np.arange(10800, 18300, 300), map, grid_spacing, tornado_track, title, "argmax_%s_prob_%dm_%s_all.png" % (param, interp_height, tag), obs=max_refl_all, centers=vortex_centers, min_prob=args.min_prob) plotTiming(max_prob_da[domain_bounds], argmax_prob_da[domain_bounds], np.arange(10800, 14700, 300), map, grid_spacing, tornado_track, title, "argmax_%s_prob_%dm_%s_da.png" % (param, interp_height, tag), obs=max_refl_da, centers=vortex_centers, min_prob=args.min_prob) plotTiming(max_prob_fcst[domain_bounds], argmax_prob_fcst[domain_bounds], np.arange(14400, 18300, 300), map, grid_spacing, tornado_track, title, "argmax_%s_prob_%dm_%s_fcst.png" % (param, interp_height, tag), obs=max_refl_fcst, centers=vortex_centers, min_prob=args.min_prob) print "Time to plot images:", datetime.utcnow() - start return
def main(): _epoch_time = datetime(1970, 1, 1, 0, 0, 0) _initial_time = datetime(2009, 6, 5, 18, 0, 0) - _epoch_time _initial_time = ( _initial_time.microseconds + (_initial_time.seconds + _initial_time.days * 24 * 3600) * 1e6) / 1e6 _target_times = [ 1800, 3600, 5400, 7200, 9000, 10800, 11100, 11400, 11700, 12000, 12300, 12600, 12900, 13200, 13500, 13800, 14100, 14400, 14700, 15000, 15300, 15600, 15900, 16200, 16500, 16800, 17100, 17400, 17700, 18000 ] inflow_wd_lbound, inflow_wd_ubound = (100, 240) # bounds = (0, slice(90, 210), slice(40, 160)) # bounds = (0, slice(100, 180), slice(90, 170)) bounds = (0, slice(115, 140), slice(120, 145)) rev_bounds = [0] rev_bounds.extend(bounds[2:0:-1]) rev_bounds = tuple(rev_bounds) refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d" refl_times = np.array( [int(f[-6:]) for f in glob.glob("%s??????" % refl_base)]) refl_keep_times = [] refl_data = {} for tt in _target_times: idx = np.argmin(np.abs(refl_times - tt)) if refl_times[idx] > tt and idx > 0: idx -= 1 file_name = "%s%06d" % (refl_base, refl_times[idx]) hdf = nio.open_file(file_name, mode='r', format='hdf') refl_keep_times.append(refl_times[idx]) refl_data[refl_times[idx]] = hdf.variables['refl2d'][rev_bounds] _proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds[1:]) # _proj['resolution'] = 'h' map = Basemap(**_proj) ttu_sticknet_obs = cPickle.load(open("ttu_sticknet.pkl", 'r')) psu_straka_obs = cPickle.load(open("psu_straka_mesonet.pkl", 'r')) all_obs = loadObs(['ttu_sticknet.pkl', 'psu_straka_mesonet.pkl'], [ _epoch_time + timedelta(seconds=(_initial_time + t)) for t in _target_times ], map, (goshen_1km_proj['width'], goshen_1km_proj['height']), round_time=True) print all_obs # partitioned_obs = gatherObservations(all_obs, [ _initial_time + t for t in _target_times ]) for time, refl_time in zip([_initial_time + t for t in _target_times], refl_keep_times): time_str = (_epoch_time + timedelta(seconds=time)).strftime("%d %B %Y %H%M UTC") plot_obs = 
all_obs[np.where(all_obs['time'] == time)] inflow_idxs = np.where((plot_obs['wind_dir'] >= inflow_wd_lbound) & (plot_obs['wind_dir'] <= inflow_wd_ubound))[0] outflow_idxs = np.array([ idx for idx in range(plot_obs['id'].shape[0]) if idx not in inflow_idxs ]) title = "All MM observations at %s" % time_str file_name = "mm_obs_%06d.png" % (time - _initial_time) plotObservations(plot_obs, map, title, file_name, refl=refl_data[refl_time]) return
def main():
    """Verify ensemble reflectivity forecasts against observed radar sweeps.

    Computes the Equitable Threat Score (ETS) and hit/miss/false-alarm
    "confusion" fields for every ensemble member and for the
    probability-matched ensemble mean at each forecast time, plots the ETS
    time series and per-time maps, and pickles the mean-ETS series.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp', dest='exp_name', required=True)
    # Reflectivity threshold (dBZ) defining the "event" for the ETS.
    ap.add_argument('--threshold', dest='threshold', type=int, default=20)
    args = ap.parse_args()

    # Subdomain (y-slice, x-slice); only used by the commented-out cropping below.
    bounds = (slice(100, 180), slice(90, 170))
    # KCYS radar: elevation (m MSL), latitude, longitude.
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)

    threshold = args.threshold
    exp_name = args.exp_name
    img_dir = "images-%s/ets_%ddBZ" % (exp_name, threshold)

    map = Basemap(**proj)
    radar_x, radar_y = map(radar_lon, radar_lat)

    # Observed reflectivity files; the last six characters of each file name
    # encode the valid time in seconds.
    obs_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    obs_times = np.array([int(f[-6:]) for f in glob.glob("%s*" % obs_base)])

    # Forecast output at t = 14400/14700 s plus 15000-18000 s.
    fcst_files = glob.glob(
        "/caps1/tsupinie/1km-control-%s/ena???.hdf014[47]00" % exp_name)
    fcst_files.extend(
        glob.glob("/caps1/tsupinie/1km-control-%s/ena???.hdf01[5678]?00" %
                  exp_name))

    # Interpolate member reflectivity onto the radar's 0.5-degree sweep surface.
    ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(
        fcst_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity,
        "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", {
            'z_base': radar_elev,
            'y_base': radar_y,
            'x_base': radar_x,
            'elev_angle': 0.5
        },
        agl=False,
        wrap=True)  #, aggregator=lambda x: np.mean(x, axis=0))

    # ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(fcst_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
    #     {'z_base':radar_elev, 'y_base':radar_y, 'x_base':radar_x, 'elev_angle':0.5}, agl=False, wrap=True)

    # ens_refl_mean = ens_refl.mean(axis=0)
    # Probability-matched mean (project helper).
    refl_ens_mean = probMatchMean(ens_refl)

    bounds_rev = [slice(None), slice(None)]
    bounds_rev.extend(bounds[::-1])
    bounds_rev = tuple(bounds_rev)

    # refl_ens_mean = refl_ens_mean[bounds_rev[1:]]
    # ens_refl = ens_refl[bounds_rev]

    # Per-member and mean scores, plus the per-gridpoint confusion categories.
    all_ets = np.empty((len(ens_members), len(ens_times)), dtype=np.float32)
    all_ets_mean = np.empty((len(ens_times), ), dtype=np.float32)
    all_confusion = np.empty(ens_refl.shape, dtype=np.int32)
    all_confusion_mean = np.empty(refl_ens_mean.shape, dtype=np.int32)

    for wdt, time in enumerate(ens_times):
        # Observed sweep closest to, but not after, the forecast time.
        idx = np.argmin(np.abs(obs_times - time))
        if obs_times[idx] > time and idx > 0:
            idx -= 1

        bounds_obs = [0]
        bounds_obs.extend(bounds[::-1])
        bounds_obs = tuple(bounds_obs)

        obs_file_name = "%s%06d" % (obs_base, obs_times[idx])
        obs_hdf = nio.open_file(obs_file_name, mode='r', format='hdf')
        obs_refl = obs_hdf.variables['refl2d'][0]  #[bounds_obs]

        all_ets_mean[wdt], all_confusion_mean[wdt] = ETS(
            refl_ens_mean[wdt], obs_refl, threshold)

        gs_x, gs_y = goshen_1km_gs
        for lde, member in enumerate(ens_members):
            all_ets[lde, wdt], all_confusion[lde, wdt] = ETS(
                ens_refl[lde, wdt], obs_refl, threshold)

            # nx, ny = ens_refl[lde, wdt].shape
            # xs, ys = np.meshgrid( gs_x * np.arange(nx), gs_y * np.arange(ny) )
            # pylab.clf()
            # pylab.contourf(xs, ys, ens_refl[lde, wdt], levels=np.arange(10, 80, 10))
            # pylab.colorbar()
            # pylab.savefig("sweep_interp_%s_%06d.png" % (member, time))

        # Plot the probability-matched mean sweep for this time.
        nx, ny = refl_ens_mean[wdt].shape
        xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))
        pylab.clf()
        pylab.contourf(xs, ys, refl_ens_mean[wdt],
                       levels=np.arange(10, 80, 10))
        pylab.colorbar()
        pylab.savefig("%s/sweep_interp_mean_%06d.png" % (img_dir, time))

    cPickle.dump(all_ets_mean,
                 open("%s_%ddBZ.pkl" % (exp_name, threshold), 'w'), -1)

    # Rank members by their time-mean ETS and plot every trace (red) with the
    # probability-matched mean (black) on top.
    time_mean_ets = all_ets.mean(axis=1)
    sort_mean_idxs = np.argsort(time_mean_ets)

    pylab.clf()
    for lde, member in enumerate(ens_members):
        print sort_mean_idxs[lde] + 1, time_mean_ets[sort_mean_idxs[lde]]
        pylab.plot(ens_times, all_ets[lde], 'r-', lw=0.75)

    pylab.plot(ens_times, all_ets_mean, 'k-', lw=1.5)

    y_lb, y_ub = pylab.ylim()
    # Dashed line at t = 14400 s (start of the free forecast elsewhere in
    # this file's scripts).
    pylab.plot([14400, 14400], [y_lb, y_ub], 'k--', lw=0.5)
    pylab.ylim([y_lb, y_ub])
    pylab.xlim([10800, 18000])

    pylab.xlabel("Time (s)")
    pylab.ylabel("ETS")

    pylab.savefig("%s/ets_swath_mm.png" % img_dir)
    pylab.close()

    for wdt, time in enumerate(ens_times):
        # NOTE(review): `fudge` is forwarded to plotConfusion; looks like a
        # plotting offset, larger for the 20-dBZ threshold -- confirm there.
        fudge = 16
        if threshold == 20:
            fudge = 32

        plotConfusion(
            all_confusion_mean[wdt], map, goshen_1km_gs,
            "Confusion for Reflectivity of the Ensemble Mean at time %06d" %
            time,
            "%s/confusion_mean_%06d.png" % (img_dir, time),
            inset=flux_boxes[exp_name][wdt],
            fudge=fudge)

        # for lde, member in enumerate(ens_members):
        #     plotConfusion(all_confusion[lde, wdt], map, goshen_1km_gs, "Confusion for Reflectivity of Member %s at time %06d" % (member, time), "%s/confusion_ena%s_zoom_%06d.png" % (img_dir, member, time))

        # Free the (large) per-time plotting garbage before the next iteration.
        gc.collect()
    return
def plot_map(radar_data, grid_spacing, title, file_name, color_bar='refl',
             topo=None, aux_field=None, vectors=None, obs=None):
    """Draw one map of a gridded field with optional overlays.

    Parameters:
        radar_data: 2D field, cropped by `bounds` below (indexed with the
            reversed bounds, so axes are assumed (x, y) -- TODO confirm).
        grid_spacing: scalar grid spacing used for both axes.
        title, file_name: figure title and output image path.
        color_bar: 'refl', 'radv', or 'pt'; any other value leaves
            `levels`/`color_map` unset and raises NameError at contourf.
        topo: optional (data, lats, lons) terrain background.
        aux_field: optional (levels, data) contoured in black on top.
        vectors: optional (u, v) drawn as quivers every 4th grid point.
        obs: optional {station_id: record} dict; records must provide
            'temperature' (deg F), 'pressure', 'Latitude', 'Longitude'.
    """
    # Crop region (full domain by default).
    # bounds = (slice(80, 160), slice(90, 170))
    bounds = (slice(None), slice(None))
    # bounds = (slice(242, 327), slice(199, 284))
    pylab.figure()
    pylab.subplots_adjust(left=0.02, right=0.98, top=0.90, bottom=0.02)
    nx, ny = radar_data[bounds[::-1]].shape
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds)
    map = Basemap(**proj)
    # Radar location (computed but only drawn by the commented line below).
    radar_x, radar_y = map([-104.299004], [41.561497])
    if topo is not None:
        # Shade terrain beneath everything else.
        topo_data, topo_lats, topo_lons = topo
        topo_lats, topo_lons = np.meshgrid(topo_lats, topo_lons)
        topo_x, topo_y = map(topo_lons, topo_lats)
        map.contourf(topo_x, topo_y, topo_data, cmap=pylab.get_cmap('gray'))
    # Contour levels and colormap for the requested field type.
    if color_bar == 'refl':
        levels = range(10, 85, 5)
        color_map = NWSRef  #pylab.get_cmap('jet')
    elif color_bar == 'radv':
        levels = range(-35, 40, 5)
        color_map = pylab.get_cmap('RdBu')
    elif color_bar == 'pt':
        levels = range(296, 321, 2)
        color_map = pylab.get_cmap('jet')
    print nx, ny
    x, y = np.meshgrid(grid_spacing * np.arange(nx),
                       grid_spacing * np.arange(ny))
    map.contourf(x, y, radar_data[bounds[::-1]], levels=levels,
                 cmap=color_map)
    # map.plot(radar_x, radar_y, 'ko')
    pylab.colorbar()
    if aux_field is not None:
        aux_levels, aux_data = aux_field
        CS = map.contour(x, y, aux_data[bounds[::-1]], levels=aux_levels,
                         colors='k', lw=0.75)
        # pylab.clabel(CS, fmt='%.0f', inline_spacing=1, fontsize="12px")
    drawPolitical(map, scale_len=25)
    if obs is not None:
        for stid, ob in obs.iteritems():
            # deg F -> K, then potential temperature; 29.5250192 is
            # presumably the reference pressure in the same units as
            # ob['pressure'] (looks like inches Hg) -- TODO confirm.
            potential_temperature = (5. / 9. * (ob['temperature'] - 32) +
                                     273.15) * (29.5250192 /
                                                ob['pressure'])**(2. / 7.)
            ob_x, ob_y = map(ob['Longitude'], ob['Latitude'])
            # Convert to axes coordinates and draw only obs inside the axes.
            ob_ax_x, ob_ax_y = (pylab.gca().transData +
                                pylab.gca().transAxes.inverted()).transform(
                                    np.array([ob_x, ob_y]))
            if ob_ax_x > 0 and ob_ax_x <= 1 and ob_ax_y > 0 and ob_ax_y <= 1:
                pylab.gca().add_patch(
                    Circle((ob_x, ob_y), 4000, fc='none', ec='k'))
                pylab.text(ob_x, ob_y, "%5.1f" % potential_temperature,
                           size='x-small', ha='right', va='bottom')
    if vectors is not None:
        stride = 4
        u, v = vectors
        pylab.quiver(x[::stride, ::stride], y[::stride, ::stride],
                     u[::stride, ::stride], v[::stride, ::stride])
    pylab.title(title)
    pylab.savefig(file_name)
    return
def main():
    """Map ensemble covariances/correlations of observations with a field.

    For each assimilation time, computes the ensemble covariance and
    correlation between every observation valid then and the model field(s)
    in `variables`, interpolates both to `interp_height` m AGL, and plots
    them with plotCov(), passing a per-ob radius of influence.
    """
    # Assimilation times: every 300 s from 11100 through 14400 s after 18 UTC.
    times_sec = range(11100, 14700, 300)
    run_base_time = datetime(2009, 6, 5, 18, 0, 0)
    times_dt = [ run_base_time + timedelta(seconds=t) for t in times_sec ]
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    # Field(s) to correlate; with a single entry the field is paired with itself.
    variables = [ 'pt' ]
    # Height (m AGL) at which the covariance fields are plotted.
    interp_height = 25
    if len(variables) == 2:
        var1, var2 = variables
    else:
        var1 = variables[0]
        var2 = variables[0]
    map = Basemap(**proj)
    radar_x, radar_y = map(radar_lon, radar_lat)
    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl',
                      'asos.pkl', 'soundings_da.pkl']
    all_obs = loadObs(obs_file_names, times_dt, map,
                      sounding_obs=['soundings_da.pkl'])
    print all_obs
    forecast_base = "/caps1/tsupinie/1km-control-20120712/"
    grdbas_file_name = "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas"
    enf_files = glob.glob("%s/enf???.hdf0*" % forecast_base)
    # points=None: keep the full 3D ensemble fields (no interpolation here).
    enf_data, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files, variables, toRecArray, grdbas_file_name,
        points=None, agl=True, wrap=False)
    # Ensemble-mean reflectivity at interp_height, used as a plot backdrop.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity,
        "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
        {'z':interp_height}, agl=True, wrap=False,
        aggregator=lambda x: np.mean(x, axis=0))
    grdbas = nio.open_file(grdbas_file_name, mode='r', format='hdf')
    # Coordinate axes with the height coordinate both MSL and AGL.
    axes_msl = { 'z':grdbas.variables['zp'][:],
                 'y':grdbas.variables['y'][:],
                 'x':grdbas.variables['x'][:] }
    axes_agl = { 'z':_makeZCoordsAGL(grdbas.variables['zp'][:]),
                 'y':grdbas.variables['y'][:],
                 'x':grdbas.variables['x'][:] }
    for wdt, time_dt in enumerate(times_dt):
        print "Working on time %s" % str(time_dt)
        time_idxs = np.where(all_obs['time'] ==
                             (time_dt - datetime(1970, 1, 1, 0, 0, 0)).total_seconds())
        # Last argument selects covariance (False) vs correlation (True).
        cov = loadAndComputeCovariance(all_obs[time_idxs],
                                       enf_data[var1][:, wdt],
                                       enf_data[var2][:, wdt], map,
                                       axes_msl, False)
        cor = loadAndComputeCovariance(all_obs[time_idxs],
                                       enf_data[var1][:, wdt],
                                       enf_data[var2][:, wdt], map,
                                       axes_msl, True)
        for ob_idx in xrange(cov.shape[0]):
            ob = all_obs[time_idxs[0][ob_idx]]
            interp_cov = interpolate(cov[ob_idx], axes_agl, { 'z':interp_height })
            interp_cor = interpolate(cor[ob_idx], axes_agl, { 'z':interp_height })
            # Radius of influence forwarded to plotCov: 50 by default, 300
            # for "SA" obtype, and an elevation-dependent ellipse for
            # soundings ("SNDG"). Units are whatever plotCov expects --
            # TODO confirm (looks like km-scale).
            roi = 50
            if ob['obtype'] == "SA":
                roi = 300
            elif ob['obtype'] == "SNDG":
                obs_x, obs_y = map(ob['longitude'], ob['latitude'])
                interp_height_msl = interpolate(axes_msl['z'], axes_msl,
                                                { 'y':obs_y, 'x':obs_x },
                                                wrap=True)[0]
                print interp_height_msl
                r_h = 150.
                r_v = 6.
                # Shrink the horizontal radius as the ob's height departs
                # from the plotting level (elliptical localization).
                z_diff = (interp_height_msl - ob['elevation']) / 1000.
                if np.abs(z_diff) > r_v:
                    roi = 0
                else:
                    roi = r_h * np.sqrt(1 - (z_diff / r_v) ** 2)
            print roi
            print np.nanmin(interp_cov), np.nanmax(interp_cov)
            plotCov(interp_cov, refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map, goshen_1km_gs,
                    "cov_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi, normalize=(-2.0, 2.0))
            plotCov(interp_cor, refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map, goshen_1km_gs,
                    "cor_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi)
    return
def main():
    """Plot forecast profiles at three proximity-sounding locations.

    The active code loads three pickled proximity soundings and draws a
    forecast profile for each via fcstProfile(). The large commented blocks
    are earlier variants that interpolated the ensemble to sounding
    locations and plotted sounding release points on the probability-matched
    reflectivity; they are retained for reference.
    """
    exp_name = "mod-05XP"
    base_path = "/caps1/tsupinie/1km-control-%s" % exp_name
    # Final-time (t = 18000 s) analysis files (used only by the commented
    # code below).
    files = glob.glob("%s/ena???.hdf018000" % base_path)
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    nicknames = ['fcst.wynssl.014400.png', 'fcst.bushnell.014400.png',
                 'fcst.ncar2.014400.png']
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    # radar_x, radar_y = map(radar_lon, radar_lat)
    # coords = []
    # base_time = datetime(2009, 6, 5, 18, 0, 0)
    # soundings = loadObs(['soundings_clip.pkl'], [ base_time + timedelta(seconds=18000) ], map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    # sounding_ids = np.unique1d(soundings['id'])
    # soundings = soundings[np.where(soundings['id'] == "Bush")]
    # obs_x, obs_y = map(soundings['longitude'], soundings['latitude'])
    # obs_p = soundings['pres'] * 100
    # ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps1/tsupinie/1km-control-mod-05XP/ena001.hdfgrdbas",
    #     {'z':obs_p, 'y':obs_y, 'x':obs_x}, agl=False, wrap=True, coords='pres')
    # print ens_obs.shape
    # for snd_id in ['Bush']: # sounding_ids:
    #     sounding_idxs = np.where(soundings['id'] == snd_id)[0]
    #     snd_nickname = snd_id.replace('/', '_')
    #     fcstSounding(ens_obs[:, 0, sounding_idxs], soundings[sounding_idxs], "fcst.%s.png" % snd_nickname)

    # Draw one forecast profile per pickled proximity sounding.
    for snd_file, plot_file in zip(
            ['proximity1.pkl', 'proximity2.pkl', 'proximity3.pkl'],
            nicknames):
        obs_sounding = cPickle.load(open(snd_file, 'r'))
        fcstProfile(base_path, obs_sounding, map, plot_file)
        # coords.append(getReleaseCoords(obs_sounding))

    ### Plot the locations of the sounding obs on the probability-matched reflectivity
    # ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity, "%s/ena001.hdfgrdbas" % base_path,
    #     {'z_base':radar_elev, 'y_base':radar_y, 'x_base':radar_x, 'elev_angle':0.5}, agl=False, wrap=True) #, aggregator=lambda x: np.mean(x, axis=0))
    # refl_ens_mean = probMatchMean(ens_refl)
    # print ens_refl.shape
    # print refl_ens_mean.shape
    # pylab.figure()
    # dx, dy = goshen_1km_gs
    # nx, ny = refl_ens_mean[0].shape
    # xs, ys = np.meshgrid(dx * np.arange(nx), dy * np.arange(ny))
    # pylab.contourf(xs, ys, refl_ens_mean[0], levels=np.arange(10, 80, 10))
    # for snd_id, marker in zip(sounding_ids, ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'L', 'M'][:len(sounding_ids)]):
    #     sounding_idxs = np.where(soundings['id'] == snd_id)[0]
    #     lats = soundings['latitude'][sounding_idxs]
    #     lons = soundings['longitude'][sounding_idxs]
    #     order = np.argsort(soundings['pres'][sounding_idxs])
    #     snd_xs, snd_ys = map(lons[order], lats[order])
    #     pylab.plot(snd_xs, snd_ys, 'ko-', markersize=2)
    #     if snd_id == "88 a":
    #         pylab.text(snd_xs[-1] - 6000, snd_ys[-1] + 2000, marker, fontsize='x-large', fontweight='bold', ha='right', va='bottom', bbox={'facecolor':'w', 'alpha':0.7})
    #     else:
    #         pylab.text(snd_xs[-1] - 2000, snd_ys[-1] - 2000, marker, fontsize='x-large', fontweight='bold', ha='right', va='top', bbox={'facecolor':'w', 'alpha':0.7})
    ## for coord, name in zip(coords, nicknames):
    ##     snd_x, snd_y = map(*coord)
    ##     pylab.plot(snd_x, snd_y, 'ko')
    ##     pylab.text(snd_x + 1000, snd_y + 1000, name, ha='left', va='bottom')
    # drawPolitical(map)
    ## pylab.savefig("fcst_snd_locations.png")
    # pylab.savefig("snd_locations.png")
    # pylab.close()
    return
def plot_map(radar_data, grid_spacing, title, file_name, color_bar='refl', topo=None, aux_field=None, vectors=None, obs=None): # bounds = (slice(80, 160), slice(90, 170)) bounds = (slice(None), slice(None)) # bounds = (slice(242, 327), slice(199, 284)) pylab.figure() pylab.subplots_adjust(left=0.02, right=0.98, top=0.90, bottom=0.02) nx, ny = radar_data[bounds[::-1]].shape proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds) map = Basemap(**proj) radar_x, radar_y = map([-104.299004], [41.561497]) if topo is not None: topo_data, topo_lats, topo_lons = topo topo_lats, topo_lons = np.meshgrid(topo_lats, topo_lons) topo_x, topo_y = map(topo_lons, topo_lats) map.contourf(topo_x, topo_y, topo_data, cmap=pylab.get_cmap('gray')) if color_bar == 'refl': levels = range(10, 85, 5) color_map = NWSRef #pylab.get_cmap('jet') elif color_bar == 'radv': levels = range(-35, 40, 5) color_map = pylab.get_cmap('RdBu') elif color_bar == 'pt': levels = range(296, 321, 2) color_map = pylab.get_cmap('jet') print nx, ny x, y = np.meshgrid(grid_spacing * np.arange(nx), grid_spacing * np.arange(ny)) map.contourf(x, y, radar_data[bounds[::-1]], levels=levels, cmap=color_map) # map.plot(radar_x, radar_y, 'ko') pylab.colorbar() if aux_field is not None: aux_levels, aux_data = aux_field CS = map.contour(x, y, aux_data[bounds[::-1]], levels=aux_levels, colors='k', lw=0.75) # pylab.clabel(CS, fmt='%.0f', inline_spacing=1, fontsize="12px") drawPolitical(map, scale_len=25) if obs is not None: for stid, ob in obs.iteritems(): potential_temperature = (5. / 9. * (ob['temperature'] - 32) + 273.15) * (29.5250192 / ob['pressure']) ** (2. / 7.) 
ob_x, ob_y = map(ob['Longitude'], ob['Latitude']) ob_ax_x, ob_ax_y = (pylab.gca().transData + pylab.gca().transAxes.inverted()).transform(np.array([ob_x, ob_y])) if ob_ax_x > 0 and ob_ax_x <= 1 and ob_ax_y > 0 and ob_ax_y <= 1: pylab.gca().add_patch(Circle((ob_x, ob_y), 4000, fc='none', ec='k')) pylab.text(ob_x, ob_y, "%5.1f" % potential_temperature, size='x-small', ha='right', va='bottom') if vectors is not None: stride = 4 u, v = vectors pylab.quiver(x[::stride, ::stride], y[::stride, ::stride], u[::stride, ::stride], v[::stride, ::stride]) pylab.title(title) pylab.savefig(file_name) return
def main():
    """Map ensemble covariances/correlations of observations with a field.

    Same workflow as the sibling implementation: for each assimilation
    time, compute the ensemble covariance and correlation of every valid
    observation with the model field(s) in `variables`, interpolate to
    `interp_height` m AGL, and plot with plotCov().
    """
    # Assimilation times: every 300 s from 11100 through 14400 s after 18 UTC.
    times_sec = range(11100, 14700, 300)
    run_base_time = datetime(2009, 6, 5, 18, 0, 0)
    times_dt = [run_base_time + timedelta(seconds=t) for t in times_sec]
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    # Field(s) to correlate; a single entry pairs the field with itself.
    variables = ['pt']
    # Height (m AGL) at which the covariance fields are plotted.
    interp_height = 25
    if len(variables) == 2:
        var1, var2 = variables
    else:
        var1 = variables[0]
        var2 = variables[0]
    map = Basemap(**proj)
    radar_x, radar_y = map(radar_lon, radar_lat)
    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_da.pkl'
    ]
    all_obs = loadObs(obs_file_names, times_dt, map,
                      sounding_obs=['soundings_da.pkl'])
    print all_obs
    forecast_base = "/caps1/tsupinie/1km-control-20120712/"
    grdbas_file_name = "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas"
    enf_files = glob.glob("%s/enf???.hdf0*" % forecast_base)
    # points=None: keep the full 3D ensemble fields (no interpolation here).
    enf_data, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files, variables, toRecArray, grdbas_file_name,
        points=None, agl=True, wrap=False)
    # Ensemble-mean reflectivity at interp_height, used as a plot backdrop.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity,
        "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
        {'z': interp_height},
        agl=True,
        wrap=False,
        aggregator=lambda x: np.mean(x, axis=0))
    grdbas = nio.open_file(grdbas_file_name, mode='r', format='hdf')
    # Coordinate axes with the height coordinate both MSL and AGL.
    axes_msl = {
        'z': grdbas.variables['zp'][:],
        'y': grdbas.variables['y'][:],
        'x': grdbas.variables['x'][:]
    }
    axes_agl = {
        'z': _makeZCoordsAGL(grdbas.variables['zp'][:]),
        'y': grdbas.variables['y'][:],
        'x': grdbas.variables['x'][:]
    }
    for wdt, time_dt in enumerate(times_dt):
        print "Working on time %s" % str(time_dt)
        time_idxs = np.where(
            all_obs['time'] ==
            (time_dt - datetime(1970, 1, 1, 0, 0, 0)).total_seconds())
        # Last argument selects covariance (False) vs correlation (True).
        cov = loadAndComputeCovariance(all_obs[time_idxs],
                                       enf_data[var1][:, wdt],
                                       enf_data[var2][:, wdt], map,
                                       axes_msl, False)
        cor = loadAndComputeCovariance(all_obs[time_idxs],
                                       enf_data[var1][:, wdt],
                                       enf_data[var2][:, wdt], map,
                                       axes_msl, True)
        for ob_idx in xrange(cov.shape[0]):
            ob = all_obs[time_idxs[0][ob_idx]]
            interp_cov = interpolate(cov[ob_idx], axes_agl,
                                     {'z': interp_height})
            interp_cor = interpolate(cor[ob_idx], axes_agl,
                                     {'z': interp_height})
            # Radius of influence forwarded to plotCov: 50 by default, 300
            # for "SA" obtype, elevation-dependent ellipse for "SNDG".
            # Units are whatever plotCov expects -- TODO confirm.
            roi = 50
            if ob['obtype'] == "SA":
                roi = 300
            elif ob['obtype'] == "SNDG":
                obs_x, obs_y = map(ob['longitude'], ob['latitude'])
                interp_height_msl = interpolate(axes_msl['z'], axes_msl, {
                    'y': obs_y,
                    'x': obs_x
                }, wrap=True)[0]
                print interp_height_msl
                r_h = 150.
                r_v = 6.
                # Shrink the horizontal radius as the ob's height departs
                # from the plotting level (elliptical localization).
                z_diff = (interp_height_msl - ob['elevation']) / 1000.
                if np.abs(z_diff) > r_v:
                    roi = 0
                else:
                    roi = r_h * np.sqrt(1 - (z_diff / r_v)**2)
            print roi
            print np.nanmin(interp_cov), np.nanmax(interp_cov)
            plotCov(interp_cov, refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map, goshen_1km_gs,
                    "cov_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi, normalize=(-2.0, 2.0))
            plotCov(interp_cor, refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map, goshen_1km_gs,
                    "cor_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi)
    return
def main(variable, refl_ens_mean): radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111 proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs) map = Basemap(**proj) radar_x, radar_y = map(radar_lon, radar_lat) ena_files = glob.glob("%s/ena???.hdf01[123]*" % analysis_base) #01[123] ena_files.extend(glob.glob("%s/ena???.hdf014[14]*" % analysis_base)) enf_files = glob.glob("%s/enf???.hdf0*" % forecast_base) ena_pt, ens_members, ens_times = loadAndInterpolateEnsemble(ena_files, [variable], lambda **x: x[variable], "/caps1/tsupinie/1km-control-no-ua/ena001.hdfgrdbas", {'z':interp_height}, agl=True, wrap=False) enf_pt, ens_members, ens_times = loadAndInterpolateEnsemble(enf_files, [variable], lambda **x: x[variable], "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", {'z':interp_height}, agl=True, wrap=False) ena_mean_pt = ena_pt.mean(axis=0) enf_mean_pt = enf_pt.mean(axis=0) ena_sprd_pt = ena_pt.std(axis=0, ddof=1) enf_sprd_pt = enf_pt.std(axis=0, ddof=1) mean_diff = ena_mean_pt - enf_mean_pt sprd_diff = ena_sprd_pt - enf_sprd_pt dx, dy = goshen_1km_gs nx, ny = mean_diff[0].shape xs, ys = np.meshgrid(dx * np.arange(nx), dy * np.arange(ny)) def getLevels(min, max, precision=1): levels_max = ceil(float(max) / precision) * precision + precision levels_min = floor(float(min) / precision) * precision return np.arange(levels_min, levels_max, precision) for wdt, ens_t in enumerate(ens_times): pylab.figure(figsize=(13, 6)) print "Minimum/maximum mean difference:", np.nanmin(mean_diff), np.nanmax(mean_diff) pylab.subplot(121) map.contourf(xs, ys, mean_diff[wdt], levels=getLevels(np.nanmin(mean_diff), np.nanmax(mean_diff), 1)) pylab.colorbar() map.contour(xs, ys, refl_ens_mean[wdt], levels=[20, 40], colors='k') map.drawcoastlines(linewidth=1.5) map.drawcountries(linewidth=1.5) map.drawstates(linewidth=1.0) map.readshapefile('countyp020', 'counties', linewidth=0.5) print "Minimum/maximum spread difference:", np.nanmin(sprd_diff), np.nanmax(sprd_diff) 
pylab.subplot(122) map.contourf(xs, ys, sprd_diff[wdt], levels=getLevels(np.nanmin(sprd_diff), np.nanmax(sprd_diff), 0.05)) pylab.colorbar() map.contour(xs, ys, refl_ens_mean[wdt], levels=[20, 40], colors='k') map.drawcoastlines(linewidth=1.5) map.drawcountries(linewidth=1.5) map.drawstates(linewidth=1.0) map.readshapefile('countyp020', 'counties', linewidth=0.5) pylab.savefig("mean_sprd_diff_%s_%s_no-rad.png" % (ens_t, variable)) return
def main():
    """Plot inflow/outflow station locations over observed reflectivity.

    For each time in the module-level `inflow_stations` dict, draws the
    observed KCYS reflectivity on the cropped subdomain and overlays the
    inflow (red) and outflow (blue) surface stations with wind barbs.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    # Subdomain (y-slice, x-slice); reflectivity files are stored with the
    # horizontal axes swapped, hence the reversed copy below.
    bounds = (slice(100, 180), slice(90, 170))
    rev_bounds = [0]
    rev_bounds.extend(bounds[::-1])
    rev_bounds = tuple(rev_bounds)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds)
    map = Basemap(**proj)

    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_clip.pkl'
    ]
    all_obs = loadObs(obs_file_names, times, map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      sounding_obs=['soundings_clip.pkl'])

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array(
        [int(f[-6:]) for f in glob.glob("%s??????" % refl_base)])
    refl_keep_times = []
    refl_data = {}

    for tt in times_seconds:
        # Sweep closest to, but not after, each target time; keyed by the
        # target time itself.
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1
        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[tt] = hdf.variables['refl2d'][rev_bounds]

    for time, reg in inflow_stations.iteritems():
        pylab.figure()

        gs_x, gs_y = goshen_1km_gs
        nx, ny = refl_data[time].shape
        xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))
        pylab.contourf(xs, ys, refl_data[time], levels=np.arange(10, 80, 10))

        for region, stations in reg.iteritems():
            if region != 'sounding':
                for station in stations:
                    idxs = np.where((all_obs['id'] == station) &
                                    (all_obs['time'] == base_epoch + time))
                    ob_xs, ob_ys = map(all_obs['longitude'][idxs],
                                       all_obs['latitude'][idxs])
                    # Red for inflow stations, blue for outflow.
                    if region == 'inflow':
                        color = 'r'
                    elif region == 'outflow':
                        color = 'b'
                    wdir = all_obs['wind_dir'][idxs]
                    wspd = all_obs['wind_spd'][idxs]
                    # Meteorological direction/speed to u/v components; the
                    # 1.94 factor is presumably m/s -> knots for the barbs
                    # -- TODO confirm wind_spd units.
                    u = -wspd * np.sin(wdir * np.pi / 180.) * 1.94
                    v = -wspd * np.cos(wdir * np.pi / 180.) * 1.94
                    pylab.plot(ob_xs, ob_ys, "%so" % color)
                    pylab.barbs(ob_xs, ob_ys, u, v)

        drawPolitical(map, scale_len=10)
        pylab.savefig("inflow_stations_%06d.png" % time)
        pylab.close()
    return
def main():
    """CRPS and rank-histogram verification of the ensemble against obs.

    For each forecast time, interpolates the ensemble to the observation
    points, then accumulates CRPS values and decompositions (alphas/betas),
    ensemble-envelope outlier flags, and rank histograms, broken down by
    region ('inflow'/'outflow'/'sounding').  Results are pickled and turned
    into verification graphs.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [ base_time + timedelta(seconds=t) for t in times_seconds ]

    n_ensemble_members = 40
    exp_name = "zupdtpt"

    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    #
    # Load and thin all the observed data
    #
    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl',
                      'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, times, map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]

    # Count obs per platform from the first character of the station id
    # ("P" -> NSSL MM, "1"/"2" -> TTU StickNet, "K" -> ASOS).
    ob_first_char = np.array([ id[0] for id in list(all_obs['id']) ])

    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(np.where((ob_first_char == "1") |
                               (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs

    all_times = [ datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t)
                  for t in all_obs['time'] ]

    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    # Pressure coordinate for the interpolation (field is scaled by 100;
    # presumably hPa -> Pa -- TODO confirm against loadEnsemble).
    obs_z = all_obs['pres'] * 100

    def getObsData(**kwargs):
        # Pack the interpolated model fields into one record array per point.
        obs = np.empty(kwargs['pt'].shape,
                       dtype=[('u', np.float32), ('v', np.float32),
                              ('pt', np.float32), ('p', np.float32),
                              ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']
        return obs

    # Verified variables and the converters from model state / raw obs.
    obs_vars = ['u', 'v', 't', 'td']
    ens_funcs = { 'u':uFromU, 'v':vFromV, 't':tempFromPt, 'td':dewpFromQv }
    obs_funcs = { 'u':uFromWind, 'v':vFromWind, 't':tempFromT,
                  'td':dewpFromTd }

    # Accumulators, keyed [ob_var][region] after lazy initialization below.
    avg_crps_values = { }
    all_crps_values = { }
    rank_histograms = { }
    all_alphas = { }
    all_betas = { }
    high_outliers = { }
    low_outliers = { }

    for time_sec, time in zip(times_seconds, times):
        # files = glob.glob("/caps2/tsupinie/1kmf-%s/ena???.hdf%06d" % (exp_name, time_sec))
        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())

        #
        # Load all the ensemble members and interpolate them to the observation points.  Because of the design of my script, I'm
        # loading the all the members timestep-by-timestep, but there's no reason you can't load them all at once.  See the function
        # definition for the meaning of all the arguments.
        #
        # ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps2/tsupinie/1kmf-%s/ena001.hdfgrdbas" % exp_name,
        #     {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True, coords='pres')
        ens_obs = loadEnsemble(
            "/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members,
            [ time_sec ], (['u', 'v', 'pt', 'p', 'qv'], getObsData),
            { 'z':obs_z[time_idxs], 'y':obs_y[time_idxs],
              'x':obs_x[time_idxs] },
            agl=False, wrap=True, coords='pres')

        # print ens_obs

        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []
            ens_ob_var = ens_funcs[ob_var](**dict(
                [ (n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names ]))
            obs = obs_funcs[ob_var](**dict(
                [ (n, all_obs[n][time_idxs]) for n in all_obs.dtype.names ]))

            # Lazily initialize the per-region accumulators the first time
            # each variable is seen.
            if ob_var not in rank_histograms:
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}

                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    # One histogram bin per possible rank (n_members + 1).
                    rank_histograms[ob_var][region] = np.zeros(
                        (ens_obs.shape[0] + 1,), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []

            for idx in xrange(obs.shape[-1]):
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                # NOTE(review): heaviside() is project-defined; these appear
                # to flag obs outside the ensemble envelope on either side
                # -- confirm the sign convention there.
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])

                # Assign the ob to inflow/outflow (by station list) and/or
                # to 'sounding' (by obtype) and accumulate.
                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    if region in inflow_stations[time_sec] and \
                            all_obs['id'][time_idxs][idx] in inflow_stations[time_sec][region]:
                        # plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Surface %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sfc_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)
                    elif region == "sounding" and \
                            all_obs['obtype'][time_idxs][idx] == "SNDG":
                        # plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Sounding %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sndg_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                time_crps_values.append(crps)

            # Time-mean CRPS over all obs at this time.
            try:
                avg_crps_values[ob_var].append(
                    sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [
                    sum(time_crps_values) / len(time_crps_values) ]

    def dictmean(D):
        # Mean over all values pooled from every region of D.
        all_lists = []
        for val in D.itervalues():
            all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Elementwise sum across the per-region arrays in D.
        all_lists = []
        for val in D.itervalues():
            all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        os.mkdir("images-%s" % exp_name, 0755)

    # Persist the raw verification pieces for later re-plotting.
    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values,
                 open("%s_crps_breakdown.pkl" % exp_name, 'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers),
                 open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)

    for ob_var in obs_vars:
        total_obs = sum([ len(v)
                          for v in high_outliers[ob_var].itervalues() ])
        print total_obs
        # Graphs aggregated over all regions, then one set per region.
        createVerificationGraphs(
            dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]),
            dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]),
            dictsum(rank_histograms[ob_var]).astype(float) / total_obs,
            total_obs, "%s" % ob_var, exp_name)
        for region in [ 'inflow', 'outflow', 'sounding' ]:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(
                mean(all_alphas[ob_var][region]),
                mean(all_betas[ob_var][region]),
                mean(high_outliers[ob_var][region]),
                mean(low_outliers[ob_var][region]),
                rank_histograms[ob_var][region].astype(float) / region_obs,
                region_obs, suffix, exp_name)

        # Time series of the per-time average CRPS for this variable.
        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])
        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
def main():
    """Compare CRPS time series from the three assimilation experiments.

    Loads the per-experiment CRPS pickles ("<exp>_crps.pkl"), overlays the
    three experiments on one figure per variable (t, td, u, v), annotates each
    verification time with the number of available observations, and saves
    one "all_crps_<var>.png" per variable.
    """
    exp_names = ["no-mm", "mm", "mod-05XP"]
    labels = {"no-mm": "No MM", "mm": "MM", "mod-05XP": "MM + MWR05XP"}
    parameters = ['t', 'td', 'u', 'v']
    param_names = {'t': "Temperature", 'td': "Dewpoint", 'u': r'$u$ Wind', 'v': r'$v$ Wind'}
    units = {'t': r'$^{\circ}$F', 'td': r'$^{\circ}$F', 'u': r'm s$^{-1}$', 'v': r'm s$^{-1}$'}

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    times = np.arange(14700, 18300, 300)          # verification times (s after model start)
    base_time = datetime(2009, 6, 5, 18, 0, 0)    # model initialization time
    dt_times = [base_time + timedelta(seconds=int(t)) for t in times]
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, dt_times, map,
        (goshen_1km_proj['width'], goshen_1km_proj['height']),
        sounding_obs=['soundings_clip.pkl'])

    # Count the observations available at each verification time; obs times
    # are stored as epoch seconds, hence the base_epoch offset.
    ob_nums = []
    for t in times:
        obs_idxs = np.where(all_obs['time'] == base_epoch + t)[0]
        ob_nums.append(len(obs_idxs))

    # One figure (with a single axes) per variable so the experiments can be
    # overlaid on it in the loop below.
    figures = dict([(p, pylab.figure()) for p in parameters])
    axes = {}
    for p in parameters:
        pylab.figure(figures[p].number)
        axes[p] = pylab.axes((0.09, 0.12, 0.82, 0.8))

    for name in exp_names:
        # FIX: close the pickle file instead of leaking one handle per experiment.
        with open("%s_crps.pkl" % name, 'r') as pkl_file:
            crps = cPickle.load(pkl_file)

        for param in parameters:
            pylab.figure(figures[param].number)
            pylab.sca(axes[param])
            pylab.plot(times, crps[param], label=labels[name])

    for param in parameters:
        pylab.figure(figures[param].number)

        # x in data coordinates, y in axes-fraction coordinates, so the ob
        # counts sit just above the bottom edge at each time.
        num_label_trans = transforms.blended_transform_factory(
            pylab.gca().transData, pylab.gca().transAxes)
        for t, n_obs in zip(times, ob_nums):
            pylab.text(t, 0.025, "%d" % n_obs, weight='bold', style='italic',
                size='xx-large', transform=num_label_trans, ha='center',
                bbox={'facecolor': '#ffffff', 'alpha': 0.7})

        pylab.xlim(times.min(), times.max())
        _, ub_y = pylab.ylim()     # keep the autoscaled top, pin the bottom at 0
        pylab.ylim(0, ub_y)

        pylab.xlabel("Time (UTC)", size='large')
        pylab.ylabel("CRPS (%s)" % units[param], size='large')
        pylab.xticks(times,
            [(base_time + timedelta(seconds=int(t))).strftime("%H%M") for t in times],
            rotation=30., size='large')
        pylab.yticks(size='large')
        pylab.legend(loc=1)

        pylab.suptitle("CRPS for %s" % param_names[param])
        pylab.savefig("all_crps_%s.png" % param)
        pylab.close()
    return
def main():
    """Overlay per-experiment CRPS curves, one figure per verified variable.

    Reads "<experiment>_crps.pkl" for each experiment, plots all experiments
    on a shared axes per variable, labels each time with its observation
    count, and writes "all_crps_<variable>.png".
    """
    experiments = ["no-mm", "mm", "mod-05XP"]
    legend_names = {"no-mm": "No MM", "mm": "MM", "mod-05XP": "MM + MWR05XP"}
    variables = ['t', 'td', 'u', 'v']
    long_names = {'t': "Temperature", 'td': "Dewpoint",
                  'u': r'$u$ Wind', 'v': r'$v$ Wind'}
    var_units = {'t': r'$^{\circ}$F', 'td': r'$^{\circ}$F',
                 'u': r'm s$^{-1}$', 'v': r'm s$^{-1}$'}

    grid_map = Basemap(**setupMapProjection(goshen_1km_proj, goshen_1km_gs))

    plot_times = np.arange(14700, 18300, 300)
    init_time = datetime(2009, 6, 5, 18, 0, 0)
    valid_times = [init_time + timedelta(seconds=int(sec)) for sec in plot_times]
    unix_epoch = datetime(1970, 1, 1, 0, 0, 0)
    init_epoch = (init_time - unix_epoch).total_seconds()

    ob_files = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
                'soundings_clip.pkl']
    all_obs = loadObs(ob_files, valid_times, grid_map,
        (goshen_1km_proj['width'], goshen_1km_proj['height']),
        sounding_obs=['soundings_clip.pkl'])

    # How many observations fall at each verification time (obs times are
    # epoch seconds, so shift by the initialization epoch).
    ob_counts = [len(np.where(all_obs['time'] == init_epoch + sec)[0])
                 for sec in plot_times]

    # One figure with a single axes per variable.
    figures = {}
    var_axes = {}
    for var in variables:
        figures[var] = pylab.figure()
        var_axes[var] = pylab.axes((0.09, 0.12, 0.82, 0.8))

    for exp in experiments:
        exp_crps = cPickle.load(open("%s_crps.pkl" % exp, 'r'))
        for var in variables:
            pylab.figure(figures[var].number)
            pylab.sca(var_axes[var])
            pylab.plot(plot_times, exp_crps[var], label=legend_names[exp])

    for var in variables:
        pylab.figure(figures[var].number)

        # Data coordinates in x, axes fraction in y: the counts hug the
        # bottom edge regardless of the y-axis scale.
        count_transform = transforms.blended_transform_factory(
            pylab.gca().transData, pylab.gca().transAxes)
        for sec, n_obs in zip(plot_times, ob_counts):
            pylab.text(sec, 0.025, "%d" % n_obs, weight='bold', style='italic',
                size='xx-large', transform=count_transform, ha='center',
                bbox={'facecolor': '#ffffff', 'alpha': 0.7})

        pylab.xlim(plot_times.min(), plot_times.max())
        lb_y, ub_y = pylab.ylim()
        pylab.ylim(0, ub_y)

        pylab.xlabel("Time (UTC)", size='large')
        pylab.ylabel("CRPS (%s)" % var_units[var], size='large')
        tick_labels = [(init_time + timedelta(seconds=int(sec))).strftime("%H%M")
                       for sec in plot_times]
        pylab.xticks(plot_times, tick_labels, rotation=30., size='large')
        pylab.yticks(size='large')
        pylab.legend(loc=1)

        pylab.suptitle("CRPS for %s" % long_names[var])
        pylab.savefig("all_crps_%s.png" % var)
        pylab.close()
    return
def main():
    """Map the inflow/outflow surface stations over radar reflectivity.

    For every analysis time in `inflow_stations`, shade the nearest KCYS
    reflectivity field on the subdomain, mark each non-sounding station
    (red = inflow, blue = outflow) with a dot and wind barb, and save
    "inflow_stations_<time>.png".
    """
    init_time = datetime(2009, 6, 5, 18, 0, 0)
    unix_epoch = datetime(1970, 1, 1, 0, 0, 0)
    init_epoch = (init_time - unix_epoch).total_seconds()

    sec_times = range(14700, 18300, 300)
    valid_times = [init_time + timedelta(seconds=sec) for sec in sec_times]

    # Subdomain slices; reversed (plus a leading 0 for the first axis) to
    # index the HDF reflectivity array.
    bounds = (slice(100, 180), slice(90, 170))
    rev_bounds = (0,) + tuple(bounds[::-1])

    grid_map = Basemap(**setupMapProjection(goshen_1km_proj, goshen_1km_gs,
        bounds=bounds))

    ob_files = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
                'soundings_clip.pkl']
    all_obs = loadObs(ob_files, valid_times, grid_map,
        (goshen_1km_proj['width'], goshen_1km_proj['height']),
        sounding_obs=['soundings_clip.pkl'])

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array([int(f[-6:]) for f in glob.glob("%s??????" % refl_base)])

    refl_keep_times = []
    refl_data = {}
    for sec in sec_times:
        # Pick the closest radar time, preferring a scan at or before the
        # target time when the nearest one is later.
        idx = np.argmin(np.abs(refl_times - sec))
        if refl_times[idx] > sec and idx > 0:
            idx -= 1

        hdf = nio.open_file("%s%06d" % (refl_base, refl_times[idx]),
            mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[sec] = hdf.variables['refl2d'][rev_bounds]

    for sec, regions in inflow_stations.iteritems():
        pylab.figure()

        gs_x, gs_y = goshen_1km_gs
        nx, ny = refl_data[sec].shape
        xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))
        pylab.contourf(xs, ys, refl_data[sec], levels=np.arange(10, 80, 10))

        for region, stations in regions.iteritems():
            if region == 'sounding':
                continue

            for station in stations:
                station_idxs = np.where((all_obs['id'] == station) &
                    (all_obs['time'] == init_epoch + sec))
                ob_xs, ob_ys = grid_map(all_obs['longitude'][station_idxs],
                    all_obs['latitude'][station_idxs])

                if region == 'inflow':
                    color = 'r'
                elif region == 'outflow':
                    color = 'b'

                wdir = all_obs['wind_dir'][station_idxs]
                wspd = all_obs['wind_spd'][station_idxs]
                # Components from direction/speed; the 1.94 factor is
                # presumably an m/s -> knots conversion — TODO confirm units.
                u = -wspd * np.sin(wdir * np.pi / 180.) * 1.94
                v = -wspd * np.cos(wdir * np.pi / 180.) * 1.94

                pylab.plot(ob_xs, ob_ys, "%so" % color)
                pylab.barbs(ob_xs, ob_ys, u, v)

        drawPolitical(grid_map, scale_len=10)
        pylab.savefig("inflow_stations_%06d.png" % sec)
        pylab.close()
    return
def main():
    """Plot ensemble "swaths" along each radiosonde's flight.

    Interpolates each ensemble member to the sounding observation locations
    at every output time, then, per sonde, plots the member traces of
    temperature/dewpoint (top panel) and pressure (bottom panel) against the
    observed trace, saving "sonde_swath_<id>.png".
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    sounding_obs = loadObs(['soundings.pkl'], times, map,
        sounding_obs=['soundings.pkl'])

    obs_x, obs_y = map(sounding_obs['longitude'], sounding_obs['latitude'])
    obs_z = sounding_obs['elevation']

    # Earliest 300-s bin containing a sounding ob, converted from epoch
    # seconds to seconds after model start.
    start_time = floor(sounding_obs['time'].min() / 300) * 300 - \
        (base_time - epoch).total_seconds()

    # FIX: np.unique1d was deprecated in NumPy 1.4 and removed in 1.8;
    # np.unique is the drop-in replacement (sorted unique values).
    sonde_ids = np.unique(sounding_obs['id'])

    sondes = {}
    for id in sonde_ids:
        sondes[id] = {'obs': [], 'ens': []}

    for time in times_seconds[times_seconds.index(start_time):]:
        time_epoch = time + (base_time - epoch).total_seconds()
        files = glob.glob(
            "/caps1/tsupinie/1km-control-20120712/ena???.hdf%06d" % time)

        # Bin the ob times to the nearest 300 s so they match this output time.
        round_times = np.round(sounding_obs['time'] / 300) * 300
        time_idxs = np.where(round_times == time_epoch)

        ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(
            files, ['u', 'v', 'pt', 'p', 'qv'], getObsData,
            "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
            {'z': obs_z[time_idxs], 'y': obs_y[time_idxs],
             'x': obs_x[time_idxs]},
            agl=False, wrap=True)
        # Reorder so the observation point is the leading axis.
        ens_obs = np.transpose(ens_obs, axes=(2, 0, 1))

        # Accumulate this time's obs and member values per sonde.
        for sonde in sonde_ids:
            sonde_idxs = np.where(sounding_obs['id'][time_idxs] == sonde)
            sondes[sonde]['obs'].extend(sounding_obs[time_idxs[0][sonde_idxs]])
            sondes[sonde]['ens'].extend([e[:, 0] for e in ens_obs[sonde_idxs]])

    for sonde in sonde_ids:
        ens_obs = np.array(sondes[sonde]['ens'],
            dtype=sondes[sonde]['ens'][0].dtype)
        ens_temp = theta2Temperature(pt=ens_obs['pt'], p=ens_obs['p'])
        ens_dewp = qv2Dewpoint(qv=ens_obs['qv'], p=ens_obs['p'])

        data_obs = np.array(sondes[sonde]['obs'],
            dtype=sondes[sonde]['obs'][0].dtype)
        order = np.argsort(data_obs['time'])

        time = data_obs['time'][order] - (base_time - epoch).total_seconds()
        # Observed temps are in Celsius; convert to Kelvin to match the
        # ensemble values.
        obs_temp = data_obs['temp'][order] + 273.15
        obs_dewp = data_obs['dewp'][order] + 273.15

        pylab.figure()
        # Thin lines: individual ensemble members.
        for ens in xrange(ens_obs.shape[1]):
            pylab.subplot(211)
            pylab.plot(time, ens_temp[:, ens][order], 'r-', linewidth=0.5)
            pylab.plot(time, ens_dewp[:, ens][order], 'g-', linewidth=0.5)

            pylab.subplot(212)
            pylab.plot(time, ens_obs['p'][:, ens][order] / 100., 'b-',
                linewidth=0.5)

        # Thick black lines: the observed sounding.
        pylab.subplot(211)
        pylab.plot(time, obs_temp, 'k-', linewidth=1.0)
        pylab.plot(time, obs_dewp, 'k-', linewidth=1.0)

        pylab.subplot(212)
        pylab.plot(time, data_obs['pres'][order], 'k-', linewidth=1.0)

        sonde_name = sonde.replace('/', '_')
        pylab.savefig("sonde_swath_%s.png" % sonde_name)
        pylab.close()
    return