def main():
    # Plot ensemble-mean soil temperature (deep and surface levels, both soil
    # types) with the mean low-level water vapor overlaid, at each time.
    base_path = "/caps1/tsupinie/"
    n_ens_members = [ 21 ]
    t_ens_start = 0
    t_ens_stop = 0
    t_ens_step = 300
    times = range(t_ens_start, t_ens_stop + t_ens_step, t_ens_step)

    ens = loadEnsemble(base_path, n_ens_members, times, (['tsoil', 'qsoil'], toRecArray), z_coord_type="soil")
    ens_qv = loadEnsemble(base_path, n_ens_members, times, (['qv'], lambda **e: e['qv']), points={'z':75}, agl=True)

    ens_spread = np.empty(ens.shape[1:], dtype=ens.dtype)
    ens_mean = np.empty(ens.shape[1:], dtype=ens.dtype)
    ens_qv_mean = ens_qv.mean(axis=0)

    for field in ens.dtype.fields.keys():
        ens_spread[field] = ens[field].std(axis=0, ddof=1)
        ens_mean[field] = ens[field].mean(axis=0)

    base_time = datetime(2009, 6, 5, 18, 0, 0)
    grid = goshen_3km_grid()

    # (soil type index, soil level index, title label, file tag) per panel.
    panels = [ (0, 1, "Deep", "deep"), (1, 1, "Deep", "deep"),
               (0, 0, "Surface", "sfc"), (1, 0, "Surface", "sfc") ]

    for wdt, time in enumerate(times):
        stamp = (base_time + timedelta(seconds=time)).strftime("%H%M")
        for nstyp, level, label, tag in panels:
            plotSoil(ens_mean['tsoil'][wdt, nstyp, level], ens_qv_mean[wdt], grid,
                     "%s Soil Temperature at %s UTC (nstyp=%d)" % (label, stamp, nstyp),
                     "tsoil_%06d_nstyp=%d_%s.png" % (time, nstyp, tag))
    return
def main():
    # Launch the interactive vorticity-maximum picker for one experiment at a
    # fixed height AGL.
    bounds = (slice(100, 180), slice(90, 170))
    exp_base = "/caps2/tsupinie/"
    exp_name = "1kmf-r0h=6km-bc7dBZ,5ms"
    height = 2000

    # Tag is the experiment name minus its resolution prefix.
    exp_tag = "-".join(exp_name.split("-")[1:])
    n_ens_members = 40

    grid = goshen_1km_grid(bounds=bounds)
    temp = goshen_1km_temporal(start=14400)

    vort = loadEnsemble("%s/%s" % (exp_base, exp_name), n_ens_members,
                        temp.getTimes(),
                        (['u', 'v', 'p', 'pt', 'z', 'dx', 'dy'], getVorticity),
                        { 'z':height }, agl=True)

    cms = ClickMaxState("%s-%dm" % (exp_tag, height), vort, n_ens_members, temp, grid)
    return
def main(): base_path = "/caps2/tsupinie/1kmf-control/" temp = goshen_1km_temporal(start=14400, end=14400) grid = goshen_1km_grid() n_ens_members = 40 np.seterr(all='ignore') ens = loadEnsemble(base_path, [ 11 ], temp.getTimes(), ([ 'pt', 'p' ], computeDensity)) ens = ens[0, 0] zs = decompressVariable(nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf').variables['zp']) xs, ys = grid.getXY() xs = xs[np.newaxis, ...].repeat(zs.shape[0], axis=0) ys = ys[np.newaxis, ...].repeat(zs.shape[0], axis=0) eff_buoy = effectiveBuoyancy(ens, (zs, ys, xs), plane={'z':10}) print eff_buoy pylab.figure() pylab.contourf(xs[0], ys[0], eff_buoy[0], cmap=matplotlib.cm.get_cmap('RdBu_r')) pylab.colorbar() grid.drawPolitical() pylab.suptitle("Effective Buoyancy") pylab.savefig("eff_buoy.png") pylab.close() return
def main(): base_path = "/caps2/tsupinie/" ap = argparse.ArgumentParser() ap.add_argument('--exp-name', dest='exp_name', required=True) ap.add_argument('--fcst', dest='fcst', action='store_true') args = ap.parse_args() np.seterr(all='ignore') exp_name = args.exp_name base_path = "/caps2/tsupinie/" data_path = "%s/%s/" % (base_path, exp_name) n_ens_members = 40 temp = goshen_1km_temporal(start=14400, end=14400) grid = goshen_1km_grid() ens_vort = loadEnsemble(data_path, n_ens_members, temp.getTimes(), (['u', 'v', 'dx', 'dy'], getVortUV), {'z': 1000}, agl=True, fcst=args.fcst) print ens_vort.shape fcst_str = "" if args.fcst: fcst_str = "_fcst" cPickle.dump( ens_vort, open("vort_pkl/vorticity%s_%s.pkl" % (fcst_str, exp_name), 'w'), -1) return
def main():
    # Compute baroclinic vorticity generation at 500 m AGL for the ensemble
    # and pickle the result.
    np.seterr(all='ignore')

    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    exp_name = args.exp_name
    base_path = "/caps2/tsupinie/"
    data_path = "%s/%s/" % (base_path, exp_name)
    n_ens_members = 40
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()

    fields = ['u', 'v', 'pt', 'p', 'qv', 'qc', 'qi', 'qr', 'qs', 'qh', 'x', 'y']
    ens_bvg = loadEnsemble(data_path, n_ens_members, temp.getTimes(),
                           (fields, computeBaroclinicVortGen),
                           {'z': 500}, agl=True)

    cPickle.dump(ens_bvg, open("vort_gen_baroc_mean_%s.pkl" % exp_name, 'w'), -1)
    return
def main():
    # Plot the ensemble spread of near-surface fields at each time, with model
    # reflectivity overlaid.
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()
    bounds = grid.getBounds()

    # Upper bound of the spread color scale, per field.
    ubounds = {'u':10, 'v':10, 't':3, 'qv':0.0035}

    base_path = "/caps2/tsupinie/"
    exp_name = "1kmf-mult=1.03"
    exp_path = "%s%s" % (base_path, exp_name)
    n_ens_members = 40

    ens = loadEnsemble(exp_path, n_ens_members, temp.getTimes(),
                       (['u', 'v', 'pt', 'p', 'qv'], computeSfc),
                       points={'sigma':2}, agl=True)

    for wdt, (time, time_str) in enumerate(zip(temp, temp.getStrings("%d %B %Y %H%M UTC"))):
        try:
            mo = ARPSModelObsFile("%s/KCYSan%06d" % (exp_path, time))
        except AssertionError:
            # Retry with the split-file (MPI) layout.
            mo = ARPSModelObsFile("%s/KCYSan%06d" % (exp_path, time), mpi_config=(2, 12))

        for field in ens.dtype.fields.iterkeys():
            std = ens[field][:, wdt, ...].std(axis=0, ddof=1)
            plotSpread(std, grid, ubounds[field],
                       "Spread in %s at %s" % (field, time_str),
                       "spread_%s_%s_%06d.png" % (field, exp_name, time),
                       refl=mo['Z'][0])
    return
def main():
    # Decompose the vorticity budget at 500 m AGL into stretching and tilting
    # terms and pickle them along with the mean horizontal wind.
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    np.seterr(all='ignore')

    exp_name = args.exp_name
    base_path = "/caps2/tsupinie/"
    data_path = "%s/%s/" % (base_path, exp_name)
    n_ens_members = 40
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()

    ens_vg = loadEnsemble(data_path, n_ens_members, temp.getTimes(),
                          (['u', 'v', 'w', 'x', 'y', 'z'], getVGTandUV),
                          {'z':500}, agl=True, buffer=True)
    ens_vort = compute3DVorticity(vg_tensor=ens_vg)

    # Stretching: each vorticity component times the matching normal strain.
    z_stretching = ens_vort['zvort'] * ens_vg['dwdz']
    y_stretching = ens_vort['yvort'] * ens_vg['dvdy']
    x_stretching = ens_vort['xvort'] * ens_vg['dudx']

    # Tilting of horizontal vorticity into the vertical.
    horiz_tilting = ens_vort['yvort'] * ens_vg['dwdy'] + ens_vort['xvort'] * ens_vg['dwdx']

    # Horizontal stretching projected along the horizontal wind direction.
    horiz_stretching = (ens_vg['u'] * x_stretching + ens_vg['v'] * y_stretching) \
        / np.hypot(ens_vg['u'], ens_vg['v'])

    cPickle.dump((z_stretching, horiz_tilting, horiz_stretching,
                  ens_vg['u'].mean(axis=0), ens_vg['v'].mean(axis=0)),
                 open("vort_gen_mean_%s.pkl" % exp_name, 'w'), -1)
    return
def main(): base_path = "/caps2/tsupinie/" ap = argparse.ArgumentParser() ap.add_argument('--exp-name', dest='exp_name', required=True) args = ap.parse_args() np.seterr(all='ignore') exp_name = args.exp_name base_path = "/caps2/tsupinie/" data_path = "%s/%s/" % (base_path, exp_name) n_ens_members = 40 temp = goshen_1km_temporal(start=14400, end=18000) grid = goshen_1km_grid() # Sounding loc: [164124.50758138258, 92544.16037613325] y_snd, x_snd = grid.getXY(164, 103) print x_snd, y_snd ens_hodo = loadEnsemble(data_path, n_ens_members, temp.getTimes(), (['u', 'v'], vectorTuple), { 'x': x_snd, 'y': y_snd }) cPickle.dump(ens_hodo, open("hodo_pkl/%s_hodo.pkl" % exp_name, 'w'), -1) return
def main():
    # Interactive vorticity-maximum picking for one experiment at a fixed
    # height AGL (same workflow as the other ClickMaxState driver).
    bounds = (slice(100, 180), slice(90, 170))
    exp_base = "/caps2/tsupinie/"
    exp_name = "1kmf-r0h=6km-bc7dBZ,5ms"
    height = 2000

    exp_tag = "-".join(exp_name.split("-")[1:])
    n_ens_members = 40

    grid = goshen_1km_grid(bounds=bounds)
    temp = goshen_1km_temporal(start=14400)

    load_fields = ['u', 'v', 'p', 'pt', 'z', 'dx', 'dy']
    vort = loadEnsemble("%s/%s" % (exp_base, exp_name), n_ens_members,
                        temp.getTimes(), (load_fields, getVorticity),
                        {'z': height}, agl=True)

    cms = ClickMaxState("%s-%dm" % (exp_tag, height), vort, n_ens_members, temp, grid)
    return
def main():
    # Verify the ensemble-mean surface temperature against surface obs:
    # interpolate the obs to the model grid, plot mean-minus-obs differences
    # with reflectivity contours, and pickle the per-time difference fields.
    base_path = "/caps2/tsupinie/"
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()
    n_ens_members = 40
    exp_name = args.exp_name

    # Subdomain used only to select which observations are loaded.
    bounds_obs = (slice(100, 180), slice(90, 170))
    grid_obs = goshen_1km_grid(bounds=bounds_obs)

    # Full domain for the model fields and plots.
    bounds = (slice(None), slice(None))
    grid = goshen_1km_grid(bounds=bounds)
    temp = goshen_1km_temporal(start=14400)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid_obs, grid_obs.getWidthHeight())

    # Project ob lon/lat to grid x/y; one (x, y) row per observation.
    obs_xy = np.vstack(grid(all_obs['longitude'], all_obs['latitude'])).T

    ens = loadEnsemble("/caps2/tsupinie/%s/" % exp_name, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getTempDewpRefl), {'sigma':2}, agl=True, wrap=True)

    grid_xs, grid_ys = grid.getXY()

    obs_t_verif = []
    for wdt, (time_sec, time_epoch) in enumerate(zip(temp, temp.getEpochs())):
        try:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec))
        except AssertionError:
            # Retry with the split-file (MPI) layout.
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec), mpi_config=(2, 12))
        except:
            # Best effort: substitute zero reflectivity so plotting proceeds.
            print "Can't load reflectivity ..."
            mo = {'Z':np.zeros((1, 255, 255), dtype=np.float32)}

        # Observations valid at this nominal time.
        time_ob_idxs = np.where(all_obs['nom_time'] == time_epoch)[0]

        time_obs = all_obs[time_ob_idxs]
        time_obs_xy = obs_xy[time_ob_idxs]

        # Interpolate ob temperatures (deg F -> K) onto the model grid.
        obs_intrp = griddata(time_obs_xy, 5. / 9. * (time_obs['temp'] - 32) + 273.15, (grid_xs, grid_ys))
        print np.isfinite(obs_intrp).sum()

        pylab.figure()
        pylab.contourf(grid_xs, grid_ys, ens['t'][:, wdt].mean(axis=0)[bounds] - obs_intrp, levels=np.arange(-6, 6.5, 0.5), cmap=matplotlib.cm.get_cmap("RdBu_r"))
        pylab.colorbar()
        # Model obs arrays are indexed (y, x) relative to bounds, hence reversed.
        pylab.contour(grid_xs, grid_ys, mo['Z'][0][tuple(reversed(bounds))], levels=np.arange(10, 80, 10), colors='k')
        grid.drawPolitical()
        pylab.savefig("obs_verif/obs_%s_t_grid_%06d.png" % (exp_name[5:], time_sec))
        pylab.close()

        obs_t_verif.append(ens['t'][:, wdt].mean(axis=0) - obs_intrp)

    cPickle.dump(np.array(obs_t_verif), open("obs_verif/obs_verif_%s.pkl" % exp_name, 'w'), -1)
    return
def main():
    # Plot skew-T soundings of the pre-inflation analysis ensemble at a fixed
    # grid column: back out the adaptive covariance inflation from the
    # analysis/forecast pair, plot every member plus the (post-inflation)
    # analysis mean, and flag members with supersaturation (Td > T).
    base_path = "/caps2/tsupinie/1kmf-control/"
    temp = goshen_1km_temporal(start=14400, end=14400)
    grid = goshen_1km_grid()
    n_ens_members = 40

    x_snd, y_snd = grid.getXY(115, 115)

    ens_anal = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getSndParams), {'x':x_snd, 'y':y_snd}, fcst=False)
    ens_fcst = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getSndParams), {'x':x_snd, 'y':y_snd}, fcst=True)

    robs = RadarObsFile("qc/1km/KCYS.20090605.220000")
    grdbas = nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf')
    # Inflation influence weights where reflectivity > 20 dBZ (6-km radius).
    # NOTE(review): 'weights' is computed but not used below -- confirm intent.
    weights = computeInflWeights(grdbas.variables['zp'], robs.heights, grid, robs['Z'] > 20., 6000, 0)

    ens_mean = np.empty(ens_anal.shape[1:], dtype=ens_anal.dtype)
    ens_preinfl = np.empty(ens_anal.shape, dtype=ens_anal.dtype)
    for field in ens_anal.dtype.fields.iterkeys():
        ens_mean[field] = ens_anal[field].mean(axis=0)
        # Back out the adaptive inflation (factor 0.9) applied between
        # forecast and analysis.
        ens_preinfl[field] = undoAdaptiveInfl(ens_fcst[field], ens_anal[field], 0.9)

    for wdt, (t_ens, time_str) in enumerate(zip(temp, temp.getStrings("%d %B %Y %H%M UTC"))):
        pylab.figure(figsize=(8, 10))
        plotSkewTBackground(pylab.gca())

        for n_ens in xrange(n_ens_members):
            pres_profile = ens_preinfl['p'][n_ens, wdt]
            temp_profile = theta2Temperature(pt=ens_preinfl['pt'][n_ens, wdt], p=ens_preinfl['p'][n_ens, wdt])
            dewp_profile = qv2Dewpoint(qv=ens_preinfl['qv'][n_ens, wdt], p=ens_preinfl['p'][n_ens, wdt])

            # Supersaturated levels indicate a problem with the member.
            if np.any(temp_profile < dewp_profile):
                print "Dewpoint greater than temperature at t=%06d, n=%03d" % (t_ens, n_ens + 1), np.where(temp_profile < dewp_profile)

            # Per-member profiles: temperature red, dewpoint green (thin lines).
            plotProfile(temp_profile - 273.15, pres_profile / 100., color='r', linewidth=0.5)
            plotProfile(dewp_profile - 273.15, pres_profile / 100., color='g', linewidth=0.5)

        mean_pres_profile = ens_mean['p'][wdt]
        mean_temp_profile = theta2Temperature(pt=ens_mean['pt'][wdt], p=ens_mean['p'][wdt])
        mean_dewp_profile = qv2Dewpoint(qv=ens_mean['qv'][wdt], p=ens_mean['p'][wdt])

        # Ensemble-mean profiles in heavier black lines.
        plotProfile(mean_temp_profile - 273.15, mean_pres_profile / 100., color='k', linewidth=1.5)
        plotProfile(mean_dewp_profile - 273.15, mean_pres_profile / 100., color='k', linewidth=1.5)

        pylab.suptitle("Ensemble Soundings at %s" % time_str)
        pylab.savefig("fcst_snd_1kmf-control_preinfl_%06d.png" % t_ens)
    return
def main():
    # Compute baroclinic vorticity generation at 500 m AGL for every ensemble
    # member and write the result to a pickle.
    np.seterr(all='ignore')

    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    exp_name = args.exp_name
    base_path = "/caps2/tsupinie/"
    data_path = "%s/%s/" % (base_path, exp_name)
    n_ens_members = 40
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()

    load_fields = ['u', 'v', 'pt', 'p', 'qv', 'qc', 'qi', 'qr', 'qs', 'qh', 'x', 'y']
    ens_bvg = loadEnsemble(data_path, n_ens_members, temp.getTimes(),
                           (load_fields, computeBaroclinicVortGen),
                           {'z':500}, agl=True)

    out = open("vort_gen_baroc_mean_%s.pkl" % exp_name, 'w')
    cPickle.dump(ens_bvg, out, -1)
    return
def createDeviations(args): np.seterr(all='ignore') exp_name = args.exp_name base_path = "/caps2/tsupinie/" data_path = "%s/%s/" % (base_path, exp_name) n_ens_members = 40 temp = goshen_1km_temporal(start=args.ens_time, end=args.ens_time) grid = goshen_1km_grid() ens = loadEnsemble(data_path, n_ens_members, temp.getTimes(), ([ 'u', 'v', 'pt' ], toRecArray)) ens_dev = np.empty(ens.shape[:1], dtype=ens.dtype) for field in ens.dtype.fields.iterkeys(): ens_mean = ens[field].mean(axis=0) print field for lde in range(ens.shape[0]): mem_dev = np.sqrt(np.mean((ens[field][lde] - ens_mean) ** 2)) / np.std(ens_mean, ddof=1) cPickle.dump(ens_dev, open("closest/ens_dev_%s.pkl" % exp_name, 'w'), -1) return
def main(): ap = argparse.ArgumentParser() ap.add_argument('--exp-name', dest='exp_name', required=True) args = ap.parse_args() bounds = (slice(90, 170), slice(100, 180)) base_path = "/caps2/tsupinie/1kmf-%s" % args.exp_name temp = goshen_1km_temporal(start=14400) n_ens_members = 40 # files = glob.glob("%s/ena???.hdf014[47]00" % base_path) # files.extend(glob.glob("%s/ena???.hdf01[5678]?00" % base_path)) # files = glob.glob("%s/ena???.hdf01[0123]*" % base_path) # files.extend(glob.glob("%s/ena???.hdf014100" % base_path)) ens_vort = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'dx', 'dy'], computeVorticity)) # ens_vort = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['w'], lambda **k: k['w'])) full_bounds = [slice(None)] * 3 full_bounds.extend(bounds) print ens_vort.shape # time_height = (ens_vort[full_bounds].max(axis=-1).max(axis=-1) >= 0.015).sum(axis=0) / float(ens_vort.shape[0]) time_height = ens_vort[full_bounds].max(axis=-1).max(axis=-1).mean(axis=0) print time_height.shape cPickle.dump(time_height, open("vort_time_height_%s.pkl" % args.exp_name, 'w'), -1) plotTimeHeight(time_height, temp.getTimes(), r"Time-Height plot of maximum $\zeta$", "vort_time_height_%s.png" % args.exp_name) return
def main():
    # Plot the ensemble spread of surface fields at each analysis time,
    # overlaying model reflectivity for context.
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()
    bounds = grid.getBounds()

    # Color-scale upper bound for each plotted field.
    ubounds = {'u':10, 'v':10, 't':3, 'qv':0.0035}

    base_path = "/caps2/tsupinie/"
    exp_name = "1kmf-mult=1.03"
    exp_path = "%s%s" % (base_path, exp_name)
    n_ens_members = 40

    ens = loadEnsemble(exp_path, n_ens_members, temp.getTimes(),
                       (['u', 'v', 'pt', 'p', 'qv'], computeSfc),
                       points={'sigma':2}, agl=True)

    time_strings = temp.getStrings("%d %B %Y %H%M UTC")
    for wdt, (time, time_str) in enumerate(zip(temp, time_strings)):
        try:
            mo = ARPSModelObsFile("%s/KCYSan%06d" % (exp_path, time))
        except AssertionError:
            # Fall back to the MPI split-file layout.
            mo = ARPSModelObsFile("%s/KCYSan%06d" % (exp_path, time), mpi_config=(2, 12))

        for field in ens.dtype.fields.iterkeys():
            spread = ens[field][:, wdt, ...].std(axis=0, ddof=1)
            plotSpread(spread, grid, ubounds[field],
                       "Spread in %s at %s" % (field, time_str),
                       "spread_%s_%s_%06d.png" % (field, exp_name, time),
                       refl=mo['Z'][0])
    return
def main():
    # Break the 500 m AGL vorticity budget into stretching and tilting terms
    # and pickle them with the mean horizontal wind components.
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()

    np.seterr(all='ignore')

    exp_name = args.exp_name
    base_path = "/caps2/tsupinie/"
    data_path = "%s/%s/" % (base_path, exp_name)
    n_ens_members = 40
    temp = goshen_1km_temporal(start=14400)
    grid = goshen_1km_grid()

    ens_vg = loadEnsemble(data_path, n_ens_members, temp.getTimes(),
                          (['u', 'v', 'w', 'x', 'y', 'z'], getVGTandUV),
                          {'z': 500}, agl=True, buffer=True)
    ens_vort = compute3DVorticity(vg_tensor=ens_vg)

    # Stretching terms: vorticity component times the matching normal strain.
    z_stretching = ens_vort['zvort'] * ens_vg['dwdz']
    y_stretching = ens_vort['yvort'] * ens_vg['dvdy']
    x_stretching = ens_vort['xvort'] * ens_vg['dudx']

    # Tilting of the horizontal vorticity into the vertical.
    horiz_tilting = ens_vort['yvort'] * ens_vg['dwdy'] \
        + ens_vort['xvort'] * ens_vg['dwdx']

    # Horizontal stretching projected on the horizontal wind direction.
    wind_speed = np.hypot(ens_vg['u'], ens_vg['v'])
    horiz_stretching = (ens_vg['u'] * x_stretching + ens_vg['v'] * y_stretching) / wind_speed

    payload = (z_stretching, horiz_tilting, horiz_stretching,
               ens_vg['u'].mean(axis=0), ens_vg['v'].mean(axis=0))
    cPickle.dump(payload, open("vort_gen_mean_%s.pkl" % exp_name, 'w'), -1)
    return
def createDeviations(args): np.seterr(all='ignore') exp_name = args.exp_name base_path = "/caps2/tsupinie/" data_path = "%s/%s/" % (base_path, exp_name) n_ens_members = 40 temp = goshen_1km_temporal(start=args.ens_time, end=args.ens_time) grid = goshen_1km_grid() ens = loadEnsemble(data_path, n_ens_members, temp.getTimes(), (['u', 'v', 'pt'], toRecArray)) ens_dev = np.empty(ens.shape[:1], dtype=ens.dtype) for field in ens.dtype.fields.iterkeys(): ens_mean = ens[field].mean(axis=0) print field for lde in range(ens.shape[0]): mem_dev = np.sqrt(np.mean( (ens[field][lde] - ens_mean)**2)) / np.std(ens_mean, ddof=1) cPickle.dump(ens_dev, open("closest/ens_dev_%s.pkl" % exp_name, 'w'), -1) return
def main(): base_path = "/caps2/tsupinie/" ap = argparse.ArgumentParser() ap.add_argument('--exp-name', dest='exp_name', required=True) args = ap.parse_args() np.seterr(all='ignore') exp_name = args.exp_name base_path = "/caps2/tsupinie/" data_path = "%s/%s/" % (base_path, exp_name) n_ens_members = 40 temp = goshen_1km_temporal(start=14400, end=18000) grid = goshen_1km_grid() # Sounding loc: [164124.50758138258, 92544.16037613325] y_snd, x_snd = grid.getXY(164, 103) print x_snd, y_snd ens_hodo = loadEnsemble(data_path, n_ens_members, temp.getTimes(), (['u', 'v'], vectorTuple), {'x':x_snd, 'y':y_snd}) cPickle.dump(ens_hodo, open("hodo_pkl/%s_hodo.pkl" % exp_name, 'w'), -1) return
def main(): base_path = "/caps2/tsupinie/1kmf-control/" temp = goshen_1km_temporal(start=14400, end=14400) grid = goshen_1km_grid() n_ens_members = 40 np.seterr(all='ignore') ens = loadEnsemble(base_path, [11], temp.getTimes(), (['pt', 'p'], computeDensity)) ens = ens[0, 0] zs = decompressVariable( nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf').variables['zp']) xs, ys = grid.getXY() xs = xs[np.newaxis, ...].repeat(zs.shape[0], axis=0) ys = ys[np.newaxis, ...].repeat(zs.shape[0], axis=0) eff_buoy = effectiveBuoyancy(ens, (zs, ys, xs), plane={'z': 10}) print eff_buoy pylab.figure() pylab.contourf(xs[0], ys[0], eff_buoy[0], cmap=matplotlib.cm.get_cmap('RdBu_r')) pylab.colorbar() grid.drawPolitical() pylab.suptitle("Effective Buoyancy") pylab.savefig("eff_buoy.png") pylab.close() return
def main():
    # Multi-panel publication figure: observed reflectivity (top row) and, for
    # each experiment, one "closest" ensemble member's vertical-velocity
    # contours and 1-km wind vectors over its model reflectivity, at every
    # fourth analysis time.
    base_path = "/caps2/tsupinie/"
    # Experiment directory -> label used on the figure.
    exp_names = { '1kmf-sndr0h=25km':"CTRL", '1kmf-zs25-no-05XP':"NO_MWR", '1kmf-zs25-no-mm-05XP':"NO_MWR_MM", '1kmf-zs25-no-mm':"NO_MM", '1kmf-z-no-snd':"NO_SND", '1kmf-z-no-v2':"NO_V2" }
    experiments = [ '1kmf-sndr0h=25km', '1kmf-zs25-no-05XP', '1kmf-z-no-snd', '1kmf-zs25-no-mm' ] #, '1kmf-zs25-no-mm-05XP', '1kmf-z-no-v2']
    # Member shown for each experiment (parallel to `experiments`).
    min_ens_members = [ 17, 31, 31, 31 ] #, 36, 1 ]

    # Two subdomains: before/after t = 16200 s (presumably tracking storm
    # motion -- TODO confirm).
    bounds_1sthalf = (slice(105, 160), slice(105, 160))
    bounds_2ndhalf = (slice(130, 185), slice(105, 160))
    grid_1 = goshen_1km_grid(bounds=bounds_1sthalf)
    grid_2 = goshen_1km_grid(bounds=bounds_2ndhalf)
    bounds_1sthalf = grid_1.getBounds()
    bounds_2ndhalf = grid_2.getBounds()
    xs_1, ys_1 = grid_1.getXY()
    xs_2, ys_2 = grid_2.getXY()

    # Thin the wind vectors by this factor in both horizontal dimensions.
    thin_factor = 2
    thin = tuple([slice(None, None, thin_factor)] * 2)

    temp = goshen_1km_temporal(start=14400)

    # Load u, v, w at 1 km AGL for the selected member of each experiment.
    wind = {}
    for exp, min_ens in zip(experiments, min_ens_members):
        wind[exp] = loadEnsemble("%s%s" % (base_path, exp), [ min_ens ], temp.getTimes(), (['u', 'v', 'w'], toRecArray), {'z':1000}, agl=True)[0]

    def modelSubplotFactory(exp, min_ens, time_sec):
        # Return a subplot closure for one experiment at one time.
        wdt = temp.getTimes().index(time_sec)
        def doSubplot(multiplier=1.0, layout=(-1, -1)):
            # Pick the subdomain for this part of the event.
            if time_sec < 16200:
                xs, ys = xs_1, ys_1
                domain_bounds = bounds_1sthalf
                grid = grid_1
            else:
                xs, ys = xs_2, ys_2
                domain_bounds = bounds_2ndhalf
                grid = grid_2

            try:
                mo = ARPSModelObsFile("%s/%s/KCYS%03dan%06d" % (base_path, exp, min_ens, time_sec))
            except AssertionError:
                # Retry with the MPI split-file layout.
                mo = ARPSModelObsFile("%s/%s/KCYS%03dan%06d" % (base_path, exp, min_ens, time_sec), mpi_config=(2, 12))
            except:
                # Best effort: fall back to zero reflectivity.
                print "Can't load reflectivity ..."
                mo = {'Z':np.zeros((1, 255, 255), dtype=np.float32)}

            # Updrafts solid, downdrafts dashed.
            # NOTE(review): 'styles' is not a matplotlib contour kwarg
            # ('linestyles' was likely intended) -- confirm.
            pylab.contour(xs, ys, wind[exp]['w'][wdt][domain_bounds], levels=np.arange(2, 102, 2), styles='-', colors='k')
            pylab.contour(xs, ys, wind[exp]['w'][wdt][domain_bounds], levels=np.arange(-100, 0, 2), styles='--', colors='k')

            pylab.quiver(xs[thin], ys[thin], wind[exp]['u'][wdt][domain_bounds][thin], wind[exp]['v'][wdt][domain_bounds][thin])

            pylab.contourf(xs, ys, mo['Z'][0][domain_bounds], levels=np.arange(10, 85, 5), cmap=NWSRef, zorder=-10)
            grid.drawPolitical(scale_len=10)

            # Row label on the leftmost column only.
            row, col = layout
            if col == 1:
                pylab.text(-0.075, 0.5, exp_names[exp], transform=pylab.gca().transAxes, rotation=90, ha='center', va='center', size=12 * multiplier)
        return doSubplot

    def obsSubplotFactory(time):
        # Return a subplot closure for observed reflectivity at one time.
        def doSubplot(multiplier=1.0, layout=(-1, -1)):
            if (time - datetime(2009, 6, 5, 18, 0, 0)).total_seconds() < 16200:
                xs, ys = xs_1, ys_1
                domain_bounds = bounds_1sthalf
                grid = grid_1
            else:
                xs, ys = xs_2, ys_2
                domain_bounds = bounds_2ndhalf
                grid = grid_2

            try:
                erf = RadarObsFile("qc/1km/KCYS.20090605.%s" % time.strftime("%H%M%S"))
            except:
                print "Can't load reflectivity ..."
                erf = {'Z':np.zeros((1, 255, 255), dtype=np.float32)}

            pylab.contourf(xs, ys, erf['Z'][0][domain_bounds], levels=np.arange(10, 85, 5), cmap=NWSRef)
            grid.drawPolitical(scale_len=10)

            # Row label on the leftmost column; time header on every column.
            row, col = layout
            if col == 1:
                pylab.text(-0.075, 0.5, "Observations", transform=pylab.gca().transAxes, rotation=90, ha='center', va='center', size=12 * multiplier)
            pylab.text(0.5, 1.075, "%s UTC" % time.strftime("%H%M"), transform=pylab.gca().transAxes, ha='center', va='center', size=12 * multiplier)
        return doSubplot

    pylab.figure(figsize=(18, 21))
    pylab.subplots_adjust(left=0.025, bottom=0.1, right=0.875, top=0.975, hspace=0.05, wspace=0.05)

    # Top row: observations; subsequent rows: one per experiment.
    subplots = []
    for dt in temp.getDatetimes(aslist=True)[::4]:
        subplots.append(obsSubplotFactory(dt))
    for exp, min_ens in zip(experiments, min_ens_members):
        for time_sec in temp.getTimes()[::4]:
            subplots.append(modelSubplotFactory(exp, min_ens, time_sec))

    publicationFigure(subplots, (5, 4), corner='ur', colorbar=("Reflectivity (dBZ)", "%d", np.arange(10, 85, 5)))

    pylab.savefig("closest/bref.png")
    pylab.close()
    return
def main():
    # CRPS and rank-histogram verification of the surface ensemble forecast
    # against mesonet, Sticknet, ASOS, and sounding observations, broken down
    # by inflow/outflow/sounding regions, with plots and pickled results.
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    n_ensemble_members = 40
    exp_name = "zupdtpt"

    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    #
    # Load and thin all the observed data
    #
    obs_file_names = [ 'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl' ]
    all_obs = loadObs(obs_file_names, times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]

    # Count obs per network, keyed off the first character of the station id.
    ob_first_char = np.array([id[0] for id in list(all_obs['id'])])

    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(np.where((ob_first_char == "1") | (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs

    all_times = [ datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t) for t in all_obs['time'] ]

    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    # Ob pressure * 100 -- presumably hPa -> Pa for coords='pres' below; confirm.
    obs_z = all_obs['pres'] * 100

    def getObsData(**kwargs):
        # Bundle the interpolated model fields into one record array.
        obs = np.empty(kwargs['pt'].shape, dtype=[('u', np.float32), ('v', np.float32), ('pt', np.float32), ('p', np.float32), ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']
        return obs

    # For each verified variable: how to derive it from the ensemble record
    # array and from the observation records, respectively.
    obs_vars = ['u', 'v', 't', 'td']
    ens_funcs = {'u': uFromU, 'v': vFromV, 't': tempFromPt, 'td': dewpFromQv}
    obs_funcs = { 'u': uFromWind, 'v': vFromWind, 't': tempFromT, 'td': dewpFromTd }

    # Accumulators, keyed by variable then by region.
    avg_crps_values = {}
    all_crps_values = {}
    rank_histograms = {}
    all_alphas = {}
    all_betas = {}
    high_outliers = {}
    low_outliers = {}

    for time_sec, time in zip(times_seconds, times):
        # Observations valid at this time.
        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())

        #
        # Load all the ensemble members and interpolate them to the observation
        # points (one timestep at a time).
        #
        ens_obs = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, [time_sec], (['u', 'v', 'pt', 'p', 'qv'], getObsData), { 'z': obs_z[time_idxs], 'y': obs_y[time_idxs], 'x': obs_x[time_idxs] }, agl=False, wrap=True, coords='pres')

        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []
            ens_ob_var = ens_funcs[ob_var](**dict([(n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names]))
            obs = obs_funcs[ob_var](**dict([(n, all_obs[n][time_idxs]) for n in all_obs.dtype.names]))

            if ob_var not in rank_histograms:
                # First encounter with this variable: set up per-region bins.
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}

                for region in ['inflow', 'outflow', 'sounding']:
                    rank_histograms[ob_var][region] = np.zeros((ens_obs.shape[0] + 1, ), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []

            for idx in xrange(obs.shape[-1]):
                # Rank of the ob within the sorted ensemble, CRPS decomposition,
                # and whether the ob falls outside the ensemble envelope.
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])

                for region in ['inflow', 'outflow', 'sounding']:
                    if region in inflow_stations[time_sec] and all_obs['id'][time_idxs][idx] in inflow_stations[time_sec][region]:
                        # Ob belongs to this region's station list.
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)
                    elif region == "sounding" and all_obs['obtype'][time_idxs][idx] == "SNDG":
                        # Sounding obs are binned by obtype, not station list.
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                time_crps_values.append(crps)

            # Append this time's mean CRPS (create the list on first use).
            try:
                avg_crps_values[ob_var].append(sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [ sum(time_crps_values) / len(time_crps_values) ]

    def dictmean(D):
        # Mean over the values of all regions pooled together.
        all_lists = []
        for val in D.itervalues():
            all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Element-wise sum across regions.
        all_lists = []
        for val in D.itervalues():
            all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        # Element-wise mean of a list of arrays.
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        os.mkdir("images-%s" % exp_name, 0755)

    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values, open("%s_crps_breakdown.pkl" % exp_name, 'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers), open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)

    for ob_var in obs_vars:
        # All-region graphs, then one set per region.
        total_obs = sum([len(v) for v in high_outliers[ob_var].itervalues()])
        print total_obs
        createVerificationGraphs(dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]), dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]), dictsum(rank_histograms[ob_var]).astype(float) / total_obs, total_obs, "%s" % ob_var, exp_name)

        for region in ['inflow', 'outflow', 'sounding']:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(mean(all_alphas[ob_var][region]), mean(all_betas[ob_var][region]), mean(high_outliers[ob_var][region]), mean(low_outliers[ob_var][region]), rank_histograms[ob_var][region].astype(float) / region_obs, region_obs, suffix, exp_name)

        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])
        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
def main(): base_path = "/caps2/tsupinie/" exp_names = { '1kmf-sndr0h=25km': "CTRL", '1kmf-zs25-no-05XP': "NO_MWR", '1kmf-zs25-no-mm-05XP': "NO_MWR_MM", '1kmf-zs25-no-mm': "NO_MM", '1kmf-z-no-snd': "NO_SND", '1kmf-z-no-v2': "NO_V2" } experiments = [ '1kmf-sndr0h=25km', '1kmf-zs25-no-05XP', '1kmf-z-no-snd', '1kmf-zs25-no-mm' ] #, '1kmf-zs25-no-mm-05XP', '1kmf-z-no-v2'] min_ens_members = [17, 31, 31, 31] #, 36, 1 ] bounds_1sthalf = (slice(105, 160), slice(105, 160)) bounds_2ndhalf = (slice(130, 185), slice(105, 160)) grid_1 = goshen_1km_grid(bounds=bounds_1sthalf) grid_2 = goshen_1km_grid(bounds=bounds_2ndhalf) bounds_1sthalf = grid_1.getBounds() bounds_2ndhalf = grid_2.getBounds() xs_1, ys_1 = grid_1.getXY() xs_2, ys_2 = grid_2.getXY() thin_factor = 2 thin = tuple([slice(None, None, thin_factor)] * 2) temp = goshen_1km_temporal(start=14400) wind = {} for exp, min_ens in zip(experiments, min_ens_members): wind[exp] = loadEnsemble("%s%s" % (base_path, exp), [min_ens], temp.getTimes(), (['u', 'v', 'w'], toRecArray), {'z': 1000}, agl=True)[0] def modelSubplotFactory(exp, min_ens, time_sec): wdt = temp.getTimes().index(time_sec) def doSubplot(multiplier=1.0, layout=(-1, -1)): if time_sec < 16200: xs, ys = xs_1, ys_1 domain_bounds = bounds_1sthalf grid = grid_1 else: xs, ys = xs_2, ys_2 domain_bounds = bounds_2ndhalf grid = grid_2 try: mo = ARPSModelObsFile("%s/%s/KCYS%03dan%06d" % (base_path, exp, min_ens, time_sec)) except AssertionError: mo = ARPSModelObsFile("%s/%s/KCYS%03dan%06d" % (base_path, exp, min_ens, time_sec), mpi_config=(2, 12)) except: print "Can't load reflectivity ..." 
mo = {'Z': np.zeros((1, 255, 255), dtype=np.float32)} pylab.contour(xs, ys, wind[exp]['w'][wdt][domain_bounds], levels=np.arange(2, 102, 2), styles='-', colors='k') pylab.contour(xs, ys, wind[exp]['w'][wdt][domain_bounds], levels=np.arange(-100, 0, 2), styles='--', colors='k') pylab.quiver(xs[thin], ys[thin], wind[exp]['u'][wdt][domain_bounds][thin], wind[exp]['v'][wdt][domain_bounds][thin]) pylab.contourf(xs, ys, mo['Z'][0][domain_bounds], levels=np.arange(10, 85, 5), cmap=NWSRef, zorder=-10) grid.drawPolitical(scale_len=10) row, col = layout if col == 1: pylab.text(-0.075, 0.5, exp_names[exp], transform=pylab.gca().transAxes, rotation=90, ha='center', va='center', size=12 * multiplier) return doSubplot def obsSubplotFactory(time): def doSubplot(multiplier=1.0, layout=(-1, -1)): if (time - datetime(2009, 6, 5, 18, 0, 0)).total_seconds() < 16200: xs, ys = xs_1, ys_1 domain_bounds = bounds_1sthalf grid = grid_1 else: xs, ys = xs_2, ys_2 domain_bounds = bounds_2ndhalf grid = grid_2 try: erf = RadarObsFile("qc/1km/KCYS.20090605.%s" % time.strftime("%H%M%S")) except: print "Can't load reflectivity ..." 
erf = {'Z': np.zeros((1, 255, 255), dtype=np.float32)} pylab.contourf(xs, ys, erf['Z'][0][domain_bounds], levels=np.arange(10, 85, 5), cmap=NWSRef) grid.drawPolitical(scale_len=10) row, col = layout if col == 1: pylab.text(-0.075, 0.5, "Observations", transform=pylab.gca().transAxes, rotation=90, ha='center', va='center', size=12 * multiplier) pylab.text(0.5, 1.075, "%s UTC" % time.strftime("%H%M"), transform=pylab.gca().transAxes, ha='center', va='center', size=12 * multiplier) return doSubplot pylab.figure(figsize=(18, 21)) pylab.subplots_adjust(left=0.025, bottom=0.1, right=0.875, top=0.975, hspace=0.05, wspace=0.05) subplots = [] for dt in temp.getDatetimes(aslist=True)[::4]: subplots.append(obsSubplotFactory(dt)) for exp, min_ens in zip(experiments, min_ens_members): for time_sec in temp.getTimes()[::4]: subplots.append(modelSubplotFactory(exp, min_ens, time_sec)) publicationFigure(subplots, (5, 4), corner='ur', colorbar=("Reflectivity (dBZ)", "%d", np.arange(10, 85, 5))) pylab.savefig("closest/bref.png") pylab.close() return
def main():
    """Compute cold-pool verification fields for one experiment.

    Loads the near-surface (sigma = 2) ensemble fields, pickles the ensemble
    mean over the full grid, then interpolates every member to each surface
    observation's location at its nominal time and pickles the mean of those
    interpolated values.
    """
    np.seterr(all='ignore')
    ap = argparse.ArgumentParser()
    ap.add_argument('--data-path', dest='data_path', default="/caps2/tsupinie/")
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()
    exp_base = args.data_path
    exp_name = args.exp_name
    n_ensemble_members = 40

    temp = goshen_1km_temporal(start=14400)
    bounds = (slice(100, 180), slice(90, 170))
    grid = goshen_1km_grid(bounds=bounds)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid, grid.getWidthHeight())
    obs_x, obs_y = grid(all_obs['longitude'], all_obs['latitude'])
    obs_z = all_obs['elevation']

    grdbas_file = "%s/1kmf-%s/ena001.hdfgrdbas" % (exp_base, exp_name)
    grdbas = nio.open_file(grdbas_file, mode='r', format='hdf')
    # Subset the grid axes to the analysis window and re-origin them at zero
    # so they line up with the subsetted ensemble fields below.
    y_axis = decompressVariable(grdbas.variables['y'])[bounds[1]]
    x_axis = decompressVariable(grdbas.variables['x'])[bounds[0]]
    y_axis = y_axis - y_axis[0]
    x_axis = x_axis - x_axis[0]

    ens = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv', 'qr', 'qs', 'qh'], getTempDewpRefl), {'sigma':2}, agl=True, wrap=True)

    # Ensemble array is (member, time, <spatial>); subset the two trailing
    # spatial dimensions with the (reversed) window bounds.
    ens_slice = [slice(None), slice(None)]
    ens_slice.extend(bounds[::-1])

    # Full-grid ensemble mean of the verification variables.
    ens_mean = np.empty(ens.shape[1:], dtype=[('t', np.float32), ('td', np.float32), ('u', np.float32), ('v', np.float32)])
    for var in ens_mean.dtype.fields.iterkeys():
        ens_mean[var] = ens[var].mean(axis=0)
    cPickle.dump(ens_mean, open("cold_pool_1kmf-%s.pkl" % exp_name, 'w'), -1)

    ens = ens[tuple(ens_slice)]
    # Retained from the original comparison-plot workflow (currently unused).
    ens_refl = np.maximum(0, ens['refl'].mean(axis=0))  #probMatchMean(ens['refl'])

    # Interpolate every member to every observation's (x, y) at its nominal time.
    ens_obs = np.empty(ens.shape[:1] + all_obs.shape, dtype=ens_mean.dtype)
    epochs = temp.getEpochs(aslist=True)  # hoisted: invariant across all three loops
    for lde in xrange(ens.shape[0]):
        for var in ens_obs.dtype.fields.iterkeys():
            for ob_idx, (ob_x, ob_y) in enumerate(zip(obs_x, obs_y)):
                wdt = epochs.index(int(all_obs['nom_time'][ob_idx]))
                # BUG FIX: the x-coordinate target used to be ob_y, so every
                # observation was interpolated at x == y rather than at its
                # actual location.
                ens_obs[var][lde, ob_idx] = interpolate(ens[var][lde, wdt, np.newaxis], {'y':y_axis, 'x':x_axis}, {'y':ob_y, 'x':ob_x})

    # Ensemble spread and mean at the observation points.
    ens_obs_std = np.empty(ens_obs.shape[1:], dtype=ens_obs.dtype)
    ens_obs_mean = np.empty(ens_obs.shape[1:], dtype=ens_obs.dtype)
    for var in ens_obs_std.dtype.fields.iterkeys():
        ens_obs_std[var] = ens_obs[var].std(ddof=1, axis=0)
        ens_obs_mean[var] = ens_obs[var].mean(axis=0)

    cPickle.dump(ens_obs_mean, open("cold_pool_obs_1kmf-%s.pkl" % exp_name, 'w'), -1)
    return
def main():
    # Plot ensemble skew-T soundings for the 1kmf-control experiment at a
    # fixed column, after backing the adaptive covariance inflation out of the
    # analysis members, and flag any member with supersaturated levels
    # (dewpoint > temperature).
    base_path = "/caps2/tsupinie/1kmf-control/"
    temp = goshen_1km_temporal(start=14400, end=14400)  # a single time (t = 14400 s)
    grid = goshen_1km_grid()
    n_ens_members = 40
    # Physical (x, y) of the grid column (115, 115) where soundings are taken.
    x_snd, y_snd = grid.getXY(115, 115)
    # Analysis (fcst=False) and background/forecast (fcst=True) profiles of the
    # same fields at the same column.
    ens_anal = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getSndParams), { 'x': x_snd, 'y': y_snd }, fcst=False)
    ens_fcst = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getSndParams), { 'x': x_snd, 'y': y_snd }, fcst=True)
    robs = RadarObsFile("qc/1km/KCYS.20090605.220000")
    grdbas = nio.open_file("%s/ena001.hdfgrdbas" % base_path, mode='r', format='hdf')
    # NOTE(review): 'weights' is never used below — presumably left over from a
    # weighted-inflation experiment; confirm before removing.
    weights = computeInflWeights(grdbas.variables['zp'], robs.heights, grid, robs['Z'] > 20., 6000, 0)
    ens_mean = np.empty(ens_anal.shape[1:], dtype=ens_anal.dtype)
    ens_preinfl = np.empty(ens_anal.shape, dtype=ens_anal.dtype)
    for field in ens_anal.dtype.fields.iterkeys():
        ens_mean[field] = ens_anal[field].mean(axis=0)
        # Reconstruct the pre-inflation members from forecast/analysis pairs
        # (0.9 is the inflation factor passed to undoAdaptiveInfl).
        ens_preinfl[field] = undoAdaptiveInfl(ens_fcst[field], ens_anal[field], 0.9)
    # One skew-T figure per time: thin red/green member profiles plus a thick
    # black ensemble-mean profile.
    for wdt, (t_ens, time_str) in enumerate(zip(temp, temp.getStrings("%d %B %Y %H%M UTC"))):
        pylab.figure(figsize=(8, 10))
        plotSkewTBackground(pylab.gca())
        for n_ens in xrange(n_ens_members):
            pres_profile = ens_preinfl['p'][n_ens, wdt]
            temp_profile = theta2Temperature(pt=ens_preinfl['pt'][n_ens, wdt], p=ens_preinfl['p'][n_ens, wdt])
            dewp_profile = qv2Dewpoint(qv=ens_preinfl['qv'][n_ens, wdt], p=ens_preinfl['p'][n_ens, wdt])
            # Supersaturation check: report member number (1-based) and levels.
            if np.any(temp_profile < dewp_profile):
                print "Dewpoint greater than temperature at t=%06d, n=%03d" % (t_ens, n_ens + 1), np.where(temp_profile < dewp_profile)
            # Profiles are plotted in degC vs hPa.
            plotProfile(temp_profile - 273.15, pres_profile / 100., color='r', linewidth=0.5)
            plotProfile(dewp_profile - 273.15, pres_profile / 100., color='g', linewidth=0.5)
        mean_pres_profile = ens_mean['p'][wdt]
        mean_temp_profile = theta2Temperature(pt=ens_mean['pt'][wdt], p=ens_mean['p'][wdt])
        mean_dewp_profile = qv2Dewpoint(qv=ens_mean['qv'][wdt], p=ens_mean['p'][wdt])
        plotProfile(mean_temp_profile - 273.15, mean_pres_profile / 100., color='k', linewidth=1.5)
        plotProfile(mean_dewp_profile - 273.15, mean_pres_profile / 100., color='k', linewidth=1.5)
        pylab.suptitle("Ensemble Soundings at %s" % time_str)
        pylab.savefig("fcst_snd_1kmf-control_preinfl_%06d.png" % t_ens)
    return
def main():
    # Compute 0-3 km AGL updraft helicity (UH) for each ensemble member and
    # plot/pickle ensemble exceedance probabilities over a range of thresholds.
    ap = argparse.ArgumentParser()
    ap.add_argument('--data-path', dest='data_path', default="/caps2/tsupinie/")
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()
    data_path = args.data_path
    exp_name = args.exp_name
    base_path = "%s/%s/" % (data_path, exp_name)
    n_ens_members = 40
    domain_bounds = (slice(90, 160), slice(90, 160))
    grid = goshen_1km_grid(bounds=domain_bounds)
    temp = goshen_1km_temporal(start=14400)
    # Observed tornado track endpoints as (lat, lon) pairs -> grid (x, y).
    tornado_track = zip(*((41.63, -104.383), (41.6134, -104.224)))
    track_xs, track_ys = grid(*[ np.array(list(t)) for t in reversed(tornado_track) ])
    # UH integration layer bounds (m AGL).
    updraft_lb = 0
    updraft_ub = 3000
    # NOTE(review): this value is never used — it is shadowed by the threshold
    # loop below before any read.
    updraft_hel_thresh = 125.
    vert_hel = loadEnsemble(base_path, n_ens_members, temp.getTimes(), ([ 'u', 'v', 'w', 'dx', 'dy' ], computeVH), agl=True)
    # Save one member's raw vertical helicity (index 3) for later inspection.
    cPickle.dump(vert_hel[3], open("updraft_hel/VH_%s.pkl" % exp_name, 'w'), -1)
    axes = getAxes(base_path, agl=True)
    interp_func, bounds_func = getInterpFunctions(axes, {'z':updraft_lb})
    # UH keeps the (member, time) leading dims but is 2-D in space.
    updraft_hel = np.empty(vert_hel.shape[:2] + vert_hel.shape[-2:], dtype=vert_hel.dtype)
    n_ens_members, n_times = vert_hel.shape[:2]
    for wdt in xrange(n_times):
        print "Integrating time %06d ..." % temp[wdt]
        vh_members = [ vert_hel[(lde, wdt)] for lde in xrange(n_ens_members) ]
        # Integrate vertical helicity over the 0-3 km layer, one member per worker.
        timestep = runConcurrently(integrateUH, vh_members, args=("__placeholder__", (updraft_lb, updraft_ub), axes, interp_func))
        for lde in xrange(n_ens_members):
            updraft_hel[(lde, wdt)] = timestep[lde]
    # print np.nanmax(updraft_hel)
    # argmax_uh = np.unravel_index(np.nanargmax(updraft_hel), updraft_hel.shape)
    # print argmax_uh
    xs, ys = grid.getXY()
    prob_color_map = matplotlib.cm.RdYlBu_r
    prob_color_map.set_under('#ffffff')  # probabilities below vmin plot as white
    for updraft_hel_thresh in range(75, 425, 25):
        # Point probability: fraction of members whose swath max exceeds the threshold.
        prob_updraft_hel = np.nansum(np.nanmax(updraft_hel, axis=1) >= updraft_hel_thresh, axis=0) / float(n_ens_members)
        # Neighborhood ensemble probability of the same exceedance.
        nep_updraft_hel = neighborhoodEnsembleProbability(np.nanmax(updraft_hel, axis=1), updraft_hel_thresh)
        pylab.figure()
        pylab.pcolormesh(xs, ys, prob_updraft_hel[domain_bounds], vmin=0.1, vmax=1.0, cmap=prob_color_map)
        pylab.colorbar()
        # Overlay the observed tornado track.
        pylab.plot(track_xs, track_ys, 'mv-', lw=2.5, mfc='k', ms=8)
        grid.drawPolitical()
        pylab.savefig("updraft_hel/UH0-3_%dm2s2_%s.png" % (int(updraft_hel_thresh), exp_name))
        pylab.close()
        pylab.figure()
        pylab.pcolormesh(xs, ys, nep_updraft_hel[domain_bounds], vmin=0.1, vmax=1.0, cmap=prob_color_map)
        pylab.colorbar()
        pylab.plot(track_xs, track_ys, 'mv-', lw=2.5, mfc='k', ms=8)
        grid.drawPolitical()
        pylab.savefig("updraft_hel/NEPcirc_UH0-3_%dm2s2_%s.png" % (int(updraft_hel_thresh), exp_name))
        pylab.close()
        # Only the lowest-threshold NEP field is pickled.
        if updraft_hel_thresh == 75:
            cPickle.dump(nep_updraft_hel, open("updraft_hel/NEPcirc_UH0-3_%dm2s2_%s.pkl" % (int(updraft_hel_thresh), exp_name), 'w'), -1)
    # for lde in xrange(n_ens_members):
    #     pylab.figure()
    #     pylab.pcolormesh(xs, ys, updraft_hel[lde, 0], vmin=25, vmax=125, cmap=prob_color_map)
    #     pylab.colorbar()
    #     grid.drawPolitical()
    #     pylab.savefig("updraft_hel/UH%03d_%s_%06d.png" % (lde + 1, exp_name, 14400))
    #     pylab.close()
    return
def main():
    # CRPS / rank-histogram verification of the "zupdtpt" experiment against
    # surface and sounding observations, broken down by region (inflow,
    # outflow, sounding).  Results are pickled and plotted.
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [ base_time + timedelta(seconds=t) for t in times_seconds ]
    n_ensemble_members = 40
    exp_name = "zupdtpt"
    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)
    #
    # Load and thin all the observed data
    #
    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]
    # Count obs by network, keyed off the first character of the station id.
    ob_first_char = np.array([ id[0] for id in list(all_obs['id']) ])
    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(np.where((ob_first_char == "1") | (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs
    all_times = [ datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t) for t in all_obs['time'] ]
    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    obs_z = all_obs['pres'] * 100  # hPa -> Pa (used as the 'z' coordinate in pressure space)

    def getObsData(**kwargs):
        # Pack the raw model fields into one record array per point.
        obs = np.empty(kwargs['pt'].shape, dtype=[('u', np.float32), ('v', np.float32), ('pt', np.float32), ('p', np.float32), ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']
        return obs

    obs_vars = ['u', 'v', 't', 'td']
    # Converters from model fields / raw obs fields to each verified variable.
    ens_funcs = { 'u':uFromU, 'v':vFromV, 't':tempFromPt, 'td':dewpFromQv }
    obs_funcs = { 'u':uFromWind, 'v':vFromWind, 't':tempFromT, 'td':dewpFromTd }
    # Accumulators keyed by variable, then (except avg) by region.
    avg_crps_values = { }
    all_crps_values = { }
    rank_histograms = { }
    all_alphas = { }
    all_betas = { }
    high_outliers = { }
    low_outliers = { }
    for time_sec, time in zip(times_seconds, times):
        # Observations valid at this analysis time.
        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())
        #
        # Load all the ensemble members and interpolate them to the observation points.  Because of the design of my script, I'm
        # loading the all the members timestep-by-timestep, but there's no reason you can't load them all at once.  See the function
        # definition for the meaning of all the arguments.
        #
        ens_obs = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, [ time_sec ], (['u', 'v', 'pt', 'p', 'qv'], getObsData), { 'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs] }, agl=False, wrap=True, coords='pres')
        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []
            # Ensemble values and observed values of this variable at each point.
            ens_ob_var = ens_funcs[ob_var](**dict([ (n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names ]))
            obs = obs_funcs[ob_var](**dict([ (n, all_obs[n][time_idxs]) for n in all_obs.dtype.names ]))
            # Lazily initialize the per-variable, per-region accumulators.
            if ob_var not in rank_histograms:
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}
                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    # One bin per rank position (n_members + 1).
                    rank_histograms[ob_var][region] = np.zeros((ens_obs.shape[0] + 1,), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []
            for idx in xrange(obs.shape[-1]):
                # Rank of the ob within the sorted ensemble, CRPS and its
                # decomposition, and outlier indicators (ob outside envelope).
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])
                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    # Surface obs are assigned to regions via inflow_stations;
                    # sounding obs all fall in the 'sounding' region.
                    if region in inflow_stations[time_sec] and all_obs['id'][time_idxs][idx] in inflow_stations[time_sec][region]:
                        # plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Surface %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sfc_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)
                    elif region == "sounding" and all_obs['obtype'][time_idxs][idx] == "SNDG":
                        # plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Sounding %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sndg_%s_%03d.png" % (ob_var, idx))
                        rank_histograms[ob_var][region][rank_idx] += 1
                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)
                time_crps_values.append(crps)
            # Time-mean CRPS for this variable (EAFP: first time through
            # raises KeyError and creates the list).
            try:
                avg_crps_values[ob_var].append(sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [ sum(time_crps_values) / len(time_crps_values) ]

    def dictmean(D):
        # Mean over all values pooled from every region list in D.
        all_lists = []
        for val in D.itervalues():
            all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Element-wise sum over the per-region arrays in D.
        all_lists = []
        for val in D.itervalues():
            all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        # Element-wise mean of a list of arrays.
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        os.mkdir("images-%s" % exp_name, 0755)
    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values, open("%s_crps_breakdown.pkl" % exp_name, 'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers), open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)
    for ob_var in obs_vars:
        # All-region graphs, then one set of graphs per region.
        total_obs = sum([ len(v) for v in high_outliers[ob_var].itervalues() ])
        print total_obs
        createVerificationGraphs(dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]), dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]), dictsum(rank_histograms[ob_var]).astype(float) / total_obs, total_obs, "%s" % ob_var, exp_name)
        for region in [ 'inflow', 'outflow', 'sounding' ]:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(mean(all_alphas[ob_var][region]), mean(all_betas[ob_var][region]), mean(high_outliers[ob_var][region]), mean(low_outliers[ob_var][region]), rank_histograms[ob_var][region].astype(float) / region_obs, region_obs, suffix, exp_name)
        # Time series of the time-averaged CRPS.
        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])
        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
def main():
    # Grid the surface temperature observations at each time and plot/pickle
    # the ensemble-mean temperature minus the gridded observations, with model
    # reflectivity contours overlaid.
    base_path = "/caps2/tsupinie/"
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)
    args = ap.parse_args()
    n_ens_members = 40
    exp_name = args.exp_name
    # Obs are loaded on a windowed grid; plotting is done on the full grid.
    bounds_obs = (slice(100, 180), slice(90, 170))
    grid_obs = goshen_1km_grid(bounds=bounds_obs)
    bounds = (slice(None), slice(None))
    grid = goshen_1km_grid(bounds=bounds)
    temp = goshen_1km_temporal(start=14400)
    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid_obs, grid_obs.getWidthHeight())
    # (n_obs, 2) array of observation (x, y) locations on the plotting grid.
    obs_xy = np.vstack(grid(all_obs['longitude'], all_obs['latitude'])).T
    ens = loadEnsemble("/caps2/tsupinie/%s/" % exp_name, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getTempDewpRefl), {'sigma': 2}, agl=True, wrap=True)
    grid_xs, grid_ys = grid.getXY()
    obs_t_verif = []
    for wdt, (time_sec, time_epoch) in enumerate(zip(temp, temp.getEpochs())):
        try:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec))
        except AssertionError:
            # MPI-run output needs the decomposition to be reassembled.
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec), mpi_config=(2, 12))
        except:
            # Best effort: use an empty reflectivity field rather than die.
            print "Can't load reflectivity ..."
            mo = {'Z': np.zeros((1, 255, 255), dtype=np.float32)}
        # Observations nominally valid at this time.
        time_ob_idxs = np.where(all_obs['nom_time'] == time_epoch)[0]
        time_obs = all_obs[time_ob_idxs]
        time_obs_xy = obs_xy[time_ob_idxs]
        # Convert obs from degF to K and interpolate them onto the model grid
        # (NaN outside the convex hull of the observations).
        obs_intrp = griddata(time_obs_xy, 5. / 9. * (time_obs['temp'] - 32) + 273.15, (grid_xs, grid_ys))
        print np.isfinite(obs_intrp).sum()
        pylab.figure()
        # Ensemble-mean temperature minus gridded obs.
        pylab.contourf(grid_xs, grid_ys, ens['t'][:, wdt].mean(axis=0)[bounds] - obs_intrp, levels=np.arange(-6, 6.5, 0.5), cmap=matplotlib.cm.get_cmap("RdBu_r"))
        pylab.colorbar()
        pylab.contour(grid_xs, grid_ys, mo['Z'][0][tuple(reversed(bounds))], levels=np.arange(10, 80, 10), colors='k')
        grid.drawPolitical()
        pylab.savefig("obs_verif/obs_%s_t_grid_%06d.png" % (exp_name[5:], time_sec))
        pylab.close()
        obs_t_verif.append(ens['t'][:, wdt].mean(axis=0) - obs_intrp)
    cPickle.dump(np.array(obs_t_verif), open("obs_verif/obs_verif_%s.pkl" % exp_name, 'w'), -1)
    return
def main():
    """Compute and pickle probability-swath products for one parameter.

    For the chosen parameter (vorticity, reflectivity, or w), computes the
    ensemble probability swath over all times, and — when the assimilation
    cutoff time (t = 14400 s) is in the time list — separate DA-period and
    forecast-period swaths.  Results are pickled to max_*/argmax_* files.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('--data-dir', dest='data_dir', default=None)
    ap.add_argument('--parameter', dest='parameter', default='vort')
    ap.add_argument('--height', dest='interp_height', type=float, default=75.)
    ap.add_argument('--tag', dest='tag', required=True)
    args = ap.parse_args()

    if args.data_dir is None:
        # Synthetic-ensemble mode for testing.
        # NOTE(review): this branch never defines param_all, so the swath code
        # below would raise NameError — it looks like a stale code path;
        # confirm before relying on it.
        nx, ny = 50, 50
        grid_spacing = 1000
        n_ensemble_members = 40
        times = range(0, 1800, 300)
        u_all = np.empty((len(times), n_ensemble_members, ny, nx))
        v_all = np.empty((len(times), n_ensemble_members, ny, nx))
        for wdt, t_ens in enumerate(times):
            u, v = generateFakeEnsemble((nx, ny), grid_spacing, (grid_spacing * nx / 2, grid_spacing * (ny / 2 - (900 - t_ens) / 90.)), n_ensemble_members, (2 + t_ens / 900.) * grid_spacing)
            u_all[wdt] = u
            v_all[wdt] = v
        # Reorder to (member, time, y, x).
        u_all = np.transpose(u_all, (1, 0, 2, 3))
        v_all = np.transpose(v_all, (1, 0, 2, 3))
    else:
        # Real-data mode: choose the fields and derived-parameter function.
        if args.parameter == 'vort':
            var_list = ['u', 'v', 'dx', 'dy']
            func = computeVorticity
        elif args.parameter == 'refl':
            func = computeReflectivity
            var_list = ['p', 'pt', 'qr', 'qs', 'qh']
        elif args.parameter == 'w':
            func = lambda **kwargs: kwargs['w']
            var_list = [ 'w' ]
        n_ensemble_members = 40
        times = np.arange(14400, 18300, 300)
        param_all = loadEnsemble(args.data_dir, n_ensemble_members, times, (var_list, func), { 'z':args.interp_height }, agl=True)

    # Index of the end of data assimilation (t = 14400 s); empty when that
    # time is not in this run's time list (e.g. the synthetic branch).
    cutoff_idxs = np.where(np.asarray(times) == 14400)[0]

    # Parameter-specific exceedance threshold and the probability floor used
    # when building swaths.
    if args.parameter == 'vort':
        threshold = 0.0075
        lower_p_bound = 0.2
    elif args.parameter == 'refl':
        threshold = 40.
        lower_p_bound = 0.1
    elif args.parameter == 'w':
        threshold = 5.
        lower_p_bound = 0.1

    # BUG FIX: the probability floor was hard-coded to 0.2 here, which only
    # matched lower_p_bound for 'vort'; use the per-parameter value set above.
    objects = findProbObjects(param_all, threshold, lower_p_bound)
    max_prob_all, argmax_prob_all = swath(param_all, threshold, n_ensemble_members, times, lower_p_bound)

    if cutoff_idxs.size > 0:
        # ROBUSTNESS: use a plain int, not a 1-element array, as the slice
        # bound (newer numpy rejects array-valued slice bounds).  When the
        # cutoff time exists, its index is always < len(times), so this test
        # is equivalent to the original `cutoff < len(times)`.
        cutoff = int(cutoff_idxs[0])
        max_prob_da, argmax_prob_da = swath(param_all[:, :(cutoff + 1), :, :], threshold, n_ensemble_members, times[:(cutoff + 1)], lower_p_bound)
        max_prob_fcst, argmax_prob_fcst = swath(param_all[:, cutoff:, :, :], threshold, n_ensemble_members, times[cutoff:], lower_p_bound)
        if args.parameter == 'w':
            # The detected objects are only saved for vertical velocity.
            cPickle.dump((max_prob_all, max_prob_da, max_prob_fcst, objects), open("max_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
            cPickle.dump((argmax_prob_all, argmax_prob_da, argmax_prob_fcst), open("argmax_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
        else:
            cPickle.dump((max_prob_all, max_prob_da, max_prob_fcst), open("max_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
            cPickle.dump((argmax_prob_all, argmax_prob_da, argmax_prob_fcst), open("argmax_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
    else:
        # No DA cutoff in the time list: save only the all-times products.
        if args.parameter == 'w':
            cPickle.dump((max_prob_all, objects), open("max_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
            cPickle.dump((argmax_prob_all,), open("argmax_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
        else:
            cPickle.dump((max_prob_all,), open("max_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
            cPickle.dump((argmax_prob_all,), open("argmax_%s_prob_%dm_%s.pkl" % (args.parameter, args.interp_height, args.tag), 'w'), -1)
    return
def main(): ap = argparse.ArgumentParser() ap.add_argument('--data-path', dest='data_path', default="/caps2/tsupinie/") ap.add_argument('--exp-name', dest='exp_name', required=True) args = ap.parse_args() data_path = args.data_path exp_name = args.exp_name base_path = "%s/%s/" % (data_path, exp_name) n_ens_members = 40 domain_bounds = (slice(90, 160), slice(90, 160)) grid = goshen_1km_grid(bounds=domain_bounds) temp = goshen_1km_temporal(start=14400) tornado_track = zip(*((41.63, -104.383), (41.6134, -104.224))) track_xs, track_ys = grid( *[np.array(list(t)) for t in reversed(tornado_track)]) updraft_lb = 0 updraft_ub = 3000 updraft_hel_thresh = 125. vert_hel = loadEnsemble(base_path, n_ens_members, temp.getTimes(), (['u', 'v', 'w', 'dx', 'dy'], computeVH), agl=True) cPickle.dump(vert_hel[3], open("updraft_hel/VH_%s.pkl" % exp_name, 'w'), -1) axes = getAxes(base_path, agl=True) interp_func, bounds_func = getInterpFunctions(axes, {'z': updraft_lb}) updraft_hel = np.empty(vert_hel.shape[:2] + vert_hel.shape[-2:], dtype=vert_hel.dtype) n_ens_members, n_times = vert_hel.shape[:2] for wdt in xrange(n_times): print "Integrating time %06d ..." 
% temp[wdt] vh_members = [vert_hel[(lde, wdt)] for lde in xrange(n_ens_members)] timestep = runConcurrently(integrateUH, vh_members, args=("__placeholder__", (updraft_lb, updraft_ub), axes, interp_func)) for lde in xrange(n_ens_members): updraft_hel[(lde, wdt)] = timestep[lde] # print np.nanmax(updraft_hel) # argmax_uh = np.unravel_index(np.nanargmax(updraft_hel), updraft_hel.shape) # print argmax_uh xs, ys = grid.getXY() prob_color_map = matplotlib.cm.RdYlBu_r prob_color_map.set_under('#ffffff') for updraft_hel_thresh in range(75, 425, 25): prob_updraft_hel = np.nansum( np.nanmax(updraft_hel, axis=1) >= updraft_hel_thresh, axis=0) / float(n_ens_members) nep_updraft_hel = neighborhoodEnsembleProbability( np.nanmax(updraft_hel, axis=1), updraft_hel_thresh) pylab.figure() pylab.pcolormesh(xs, ys, prob_updraft_hel[domain_bounds], vmin=0.1, vmax=1.0, cmap=prob_color_map) pylab.colorbar() pylab.plot(track_xs, track_ys, 'mv-', lw=2.5, mfc='k', ms=8) grid.drawPolitical() pylab.savefig("updraft_hel/UH0-3_%dm2s2_%s.png" % (int(updraft_hel_thresh), exp_name)) pylab.close() pylab.figure() pylab.pcolormesh(xs, ys, nep_updraft_hel[domain_bounds], vmin=0.1, vmax=1.0, cmap=prob_color_map) pylab.colorbar() pylab.plot(track_xs, track_ys, 'mv-', lw=2.5, mfc='k', ms=8) grid.drawPolitical() pylab.savefig("updraft_hel/NEPcirc_UH0-3_%dm2s2_%s.png" % (int(updraft_hel_thresh), exp_name)) pylab.close() if updraft_hel_thresh == 75: cPickle.dump( nep_updraft_hel, open( "updraft_hel/NEPcirc_UH0-3_%dm2s2_%s.pkl" % (int(updraft_hel_thresh), exp_name), 'w'), -1) # for lde in xrange(n_ens_members): # pylab.figure() # pylab.pcolormesh(xs, ys, updraft_hel[lde, 0], vmin=25, vmax=125, cmap=prob_color_map) # pylab.colorbar() # grid.drawPolitical() # pylab.savefig("updraft_hel/UH%03d_%s_%06d.png" % (lde + 1, exp_name, 14400)) # pylab.close() return