Example #1
0
def main():
    """Plot observed vs. ensemble temperature and dewpoint time series at
    each mobile-mesonet station, saving one figure per (variable, station).
    """
    experiments = [ '1kmf-sndr0h=50km', '1kmf-zs-no-05XP', '1kmf-zs-no-mm-05XP', '1kmf-zs-no-mm', '1kmf-z-no-snd', '1kmf-z-no-v2' ]

    grid = goshen_1km_grid(bounds=(slice(100, 180), slice(90, 170)))
    temp = goshen_1km_temporal(start=14400)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid, grid.getWidthHeight())

    # Ensemble values pre-extracted at the observation points, one pickle per
    # experiment.  'rb' because pickle files are binary data.
    ens_obs = {}
    for exp in experiments:
        ens_obs[exp] = cPickle.load(open("cold_pool_obs_%s.pkl" % exp, 'rb'))

    # np.unique1d() was removed from NumPy (1.9+); np.unique() is the drop-in
    # replacement for 1-d input.
    mm_ids = np.unique(all_obs['id'])

    for station_id in mm_ids:  # renamed from 'id' to avoid shadowing the builtin
        id_idxs = np.where(all_obs['id'] == station_id)
        for ob_var, ens_var in [('temp', 't'), ('dewp', 'td')]:

            pylab.figure()
            # Observations are in degrees Fahrenheit; plot in Celsius.
            pylab.plot(all_obs['time'][id_idxs], 5 / 9. * (all_obs[ob_var][id_idxs] - 32), 'k-', label='Observed')

            for exp_name, exp in ens_obs.iteritems():
                # Ensemble values are in Kelvin; plot in Celsius.
                pylab.plot(all_obs['time'][id_idxs], exp[ens_var][id_idxs] - 273.15, label=exp_name)

            pylab.xticks(temp.getEpochs(aslist=True), temp.getStrings("%H%M", aslist=True), rotation=30)
            pylab.xlim(temp.getEpochs(aslist=True)[0], temp.getEpochs(aslist=True)[-1])
            pylab.legend(loc=1)

            pylab.savefig("mm_timeseries_%s_%s.png" % (ens_var, station_id))
            pylab.close()

    return
def main():
    base_path = "/caps2/tsupinie/"
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)

    args = ap.parse_args()

    n_ens_members = 40
    exp_name = args.exp_name

    bounds_obs = (slice(100, 180), slice(90, 170))
    grid_obs = goshen_1km_grid(bounds=bounds_obs)

    bounds = (slice(None), slice(None))
    grid = goshen_1km_grid(bounds=bounds)

    temp = goshen_1km_temporal(start=14400)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid_obs, grid_obs.getWidthHeight())
    obs_xy = np.vstack(grid(all_obs['longitude'], all_obs['latitude'])).T

    ens = loadEnsemble("/caps2/tsupinie/%s/" % exp_name, n_ens_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv'], getTempDewpRefl), {'sigma':2}, agl=True, wrap=True) 

    grid_xs, grid_ys = grid.getXY()
    obs_t_verif = []
    for wdt, (time_sec, time_epoch) in enumerate(zip(temp, temp.getEpochs())):

        try:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec))
        except AssertionError:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" % (base_path, exp_name, time_sec), mpi_config=(2, 12))
        except:
            print "Can't load reflectivity ..."
            mo = {'Z':np.zeros((1, 255, 255), dtype=np.float32)}

        time_ob_idxs = np.where(all_obs['nom_time'] == time_epoch)[0]

        time_obs = all_obs[time_ob_idxs]
        time_obs_xy = obs_xy[time_ob_idxs]

        obs_intrp = griddata(time_obs_xy, 5. / 9. * (time_obs['temp'] - 32) + 273.15, (grid_xs, grid_ys))
        print np.isfinite(obs_intrp).sum()

        pylab.figure()

        pylab.contourf(grid_xs, grid_ys, ens['t'][:, wdt].mean(axis=0)[bounds] - obs_intrp, levels=np.arange(-6, 6.5, 0.5), cmap=matplotlib.cm.get_cmap("RdBu_r"))
        pylab.colorbar()

        pylab.contour(grid_xs, grid_ys, mo['Z'][0][tuple(reversed(bounds))], levels=np.arange(10, 80, 10), colors='k')

        grid.drawPolitical()

        pylab.savefig("obs_verif/obs_%s_t_grid_%06d.png" % (exp_name[5:], time_sec))
        pylab.close()
        obs_t_verif.append(ens['t'][:, wdt].mean(axis=0) - obs_intrp)

    cPickle.dump(np.array(obs_t_verif), open("obs_verif/obs_verif_%s.pkl" % exp_name, 'w'), -1)
    return
Example #3
0
def main():
    _epoch_time = datetime(1970, 1, 1, 0, 0, 0)
    _initial_time = datetime(2009, 6, 5, 18, 0, 0) - _epoch_time
    _initial_time = (_initial_time.microseconds + (_initial_time.seconds + _initial_time.days * 24 * 3600) * 1e6) / 1e6
    _target_times = [ 1800, 3600, 5400, 7200, 9000, 10800, 11100, 11400, 11700, 12000, 12300, 12600, 12900, 13200, 13500, 13800, 14100, 14400,
        14700, 15000, 15300, 15600, 15900, 16200, 16500, 16800, 17100, 17400, 17700, 18000 ]

    inflow_wd_lbound, inflow_wd_ubound = (100, 240)

#   bounds = (0, slice(90, 210), slice(40, 160))
#   bounds = (0, slice(100, 180), slice(90, 170))
    bounds = (0, slice(115, 140), slice(120, 145))
    rev_bounds = [ 0 ]
    rev_bounds.extend(bounds[2:0:-1])
    rev_bounds = tuple(rev_bounds)

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array([ int(f[-6:]) for f in glob.glob("%s??????" % refl_base) ])
    refl_keep_times = []
    refl_data = {}

    for tt in _target_times:
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1

        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[refl_times[idx]] = hdf.variables['refl2d'][rev_bounds]

    _proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds[1:])
#   _proj['resolution'] = 'h' 
    map = Basemap(**_proj)

    ttu_sticknet_obs = cPickle.load(open("ttu_sticknet.pkl", 'r'))
    psu_straka_obs = cPickle.load(open("psu_straka_mesonet.pkl", 'r'))

    all_obs = loadObs(['ttu_sticknet.pkl', 'psu_straka_mesonet.pkl'], [ _epoch_time + timedelta(seconds=(_initial_time + t)) for t in _target_times ],  map, (goshen_1km_proj['width'], goshen_1km_proj['height']), round_time=True)
    print all_obs

#   partitioned_obs = gatherObservations(all_obs, [ _initial_time + t for t in _target_times ])
    for time, refl_time in zip([ _initial_time + t for t in _target_times], refl_keep_times):
        time_str = (_epoch_time + timedelta(seconds=time)).strftime("%d %B %Y %H%M UTC")

        plot_obs = all_obs[np.where(all_obs['time'] == time)]

        inflow_idxs = np.where((plot_obs['wind_dir'] >= inflow_wd_lbound) & (plot_obs['wind_dir'] <= inflow_wd_ubound))[0]
        outflow_idxs = np.array([ idx for idx in range(plot_obs['id'].shape[0]) if idx not in inflow_idxs ])

        title = "All MM observations at %s" % time_str
        file_name = "mm_obs_%06d.png" % (time - _initial_time)

        plotObservations(plot_obs, map, title, file_name, refl=refl_data[refl_time])
    return
Example #4
0
def computeUncertainty(var_order, region_order):
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times = np.arange(14700, 18300, 300)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_clip.pkl'
    ]
    obs = loadObs(obs_file_names,
                  [base_time + timedelta(seconds=int(t)) for t in times],
                  map, (goshen_1km_proj['width'], goshen_1km_proj['height']),
                  sounding_obs=['soundings_clip.pkl'])

    obs_part = partitionObs(obs, base_epoch)
    u, v = windDirSpd2UV(obs['wind_dir'], obs['wind_spd'])
    u_part, v_part = {}, {}
    for region, reg_obs in obs_part.iteritems():
        u_part[region], v_part[region] = windDirSpd2UV(
            obs_part[region]['wind_dir'], obs_part[region]['wind_spd'])

    all_uncert = []

    def row(all_obs, part_obs, name, units):
        uncert = uncertainty(all_obs)
        row_uncert = [uncert]

        row = "\t" + r"%s (%s) & %.2f" % (name, units, uncert)
        for region in region_order:
            uncert = uncertainty(part_obs[region])
            row += " & %.2f" % uncert
            row_uncert.append(uncert)
        print row + r"\\"
        print "\t" + r"\hline"
        return np.array(row_uncert)

    all_uncert.append(
        row(obs['temp'],
            dict([(key, val['temp']) for key, val in obs_part.iteritems()]),
            '$T$', r'$^{\circ}$F'))
    all_uncert.append(
        row(obs['dewp'],
            dict([(key, val['dewp']) for key, val in obs_part.iteritems()]),
            '$T_d$', r'$^{\circ}$F'))
    all_uncert.append(row(u, u_part, '$u$', r'm s$^{-1}$'))
    all_uncert.append(row(v, v_part, '$v$', r'm s$^{-1}$'))
    return np.array(all_uncert)
Example #5
0
def main():
    """Plot observed vs. ensemble temperature and dewpoint time series at
    each mobile-mesonet station, saving one figure per (variable, station).
    """
    experiments = [
        '1kmf-sndr0h=50km', '1kmf-zs-no-05XP', '1kmf-zs-no-mm-05XP',
        '1kmf-zs-no-mm', '1kmf-z-no-snd', '1kmf-z-no-v2'
    ]

    grid = goshen_1km_grid(bounds=(slice(100, 180), slice(90, 170)))
    temp = goshen_1km_temporal(start=14400)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid,
                      grid.getWidthHeight())

    # Ensemble values pre-extracted at the observation points; 'rb' because
    # pickle files are binary data.
    ens_obs = {}
    for exp in experiments:
        ens_obs[exp] = cPickle.load(open("cold_pool_obs_%s.pkl" % exp, 'rb'))

    # np.unique1d() was removed from NumPy (1.9+); np.unique() replaces it.
    mm_ids = np.unique(all_obs['id'])

    for station_id in mm_ids:  # renamed from 'id': don't shadow the builtin
        id_idxs = np.where(all_obs['id'] == station_id)
        for ob_var, ens_var in [('temp', 't'), ('dewp', 'td')]:

            pylab.figure()
            # Observations are in degrees Fahrenheit; plot in Celsius.
            pylab.plot(all_obs['time'][id_idxs],
                       5 / 9. * (all_obs[ob_var][id_idxs] - 32),
                       'k-',
                       label='Observed')

            for exp_name, exp in ens_obs.iteritems():
                # Ensemble values are in Kelvin; plot in Celsius.
                pylab.plot(all_obs['time'][id_idxs],
                           exp[ens_var][id_idxs] - 273.15,
                           label=exp_name)

            pylab.xticks(temp.getEpochs(aslist=True),
                         temp.getStrings("%H%M", aslist=True),
                         rotation=30)
            pylab.xlim(
                temp.getEpochs(aslist=True)[0],
                temp.getEpochs(aslist=True)[-1])
            pylab.legend(loc=1)

            pylab.savefig("mm_timeseries_%s_%s.png" % (ens_var, station_id))
            pylab.close()

    return
Example #6
0
def computeUncertainty(var_order, region_order):
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times = np.arange(14700, 18300, 300)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    obs = loadObs(obs_file_names, [ base_time + timedelta(seconds=int(t)) for t in times ], map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])


    obs_part = partitionObs(obs, base_epoch)
    u, v = windDirSpd2UV(obs['wind_dir'], obs['wind_spd'])
    u_part, v_part = {}, {}
    for region, reg_obs in obs_part.iteritems():
        u_part[region], v_part[region] = windDirSpd2UV(obs_part[region]['wind_dir'], obs_part[region]['wind_spd'])

    all_uncert = []

    def row(all_obs, part_obs, name, units):
        uncert = uncertainty(all_obs)
        row_uncert = [ uncert ]

        row = "\t" + r"%s (%s) & %.2f" % (name, units, uncert)
        for region in region_order:
            uncert = uncertainty(part_obs[region])
            row += " & %.2f" % uncert
            row_uncert.append(uncert)
        print row + r"\\"
        print "\t" + r"\hline"
        return np.array(row_uncert)

    all_uncert.append(row(obs['temp'], dict([ (key, val['temp'] ) for key, val in obs_part.iteritems()]), '$T$', r'$^{\circ}$F'))
    all_uncert.append(row(obs['dewp'], dict([ (key, val['dewp'] ) for key, val in obs_part.iteritems()]), '$T_d$', r'$^{\circ}$F'))
    all_uncert.append(row(u, u_part, '$u$', r'm s$^{-1}$'))
    all_uncert.append(row(v, v_part, '$v$', r'm s$^{-1}$'))
    return np.array(all_uncert)
Example #7
0
def main():
    _epoch_time = datetime(1970, 1, 1, 0, 0, 0)
    _initial_time = datetime(2009, 6, 5, 18, 0, 0) - _epoch_time
    _initial_time = (
        _initial_time.microseconds +
        (_initial_time.seconds + _initial_time.days * 24 * 3600) * 1e6) / 1e6
    _target_times = [
        1800, 3600, 5400, 7200, 9000, 10800, 11100, 11400, 11700, 12000, 12300,
        12600, 12900, 13200, 13500, 13800, 14100, 14400, 14700, 15000, 15300,
        15600, 15900, 16200, 16500, 16800, 17100, 17400, 17700, 18000
    ]

    inflow_wd_lbound, inflow_wd_ubound = (100, 240)

    #   bounds = (0, slice(90, 210), slice(40, 160))
    #   bounds = (0, slice(100, 180), slice(90, 170))
    bounds = (0, slice(115, 140), slice(120, 145))
    rev_bounds = [0]
    rev_bounds.extend(bounds[2:0:-1])
    rev_bounds = tuple(rev_bounds)

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array(
        [int(f[-6:]) for f in glob.glob("%s??????" % refl_base)])
    refl_keep_times = []
    refl_data = {}

    for tt in _target_times:
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1

        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[refl_times[idx]] = hdf.variables['refl2d'][rev_bounds]

    _proj = setupMapProjection(goshen_1km_proj,
                               goshen_1km_gs,
                               bounds=bounds[1:])
    #   _proj['resolution'] = 'h'
    map = Basemap(**_proj)

    ttu_sticknet_obs = cPickle.load(open("ttu_sticknet.pkl", 'r'))
    psu_straka_obs = cPickle.load(open("psu_straka_mesonet.pkl", 'r'))

    all_obs = loadObs(['ttu_sticknet.pkl', 'psu_straka_mesonet.pkl'], [
        _epoch_time + timedelta(seconds=(_initial_time + t))
        for t in _target_times
    ],
                      map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      round_time=True)
    print all_obs

    #   partitioned_obs = gatherObservations(all_obs, [ _initial_time + t for t in _target_times ])
    for time, refl_time in zip([_initial_time + t for t in _target_times],
                               refl_keep_times):
        time_str = (_epoch_time +
                    timedelta(seconds=time)).strftime("%d %B %Y %H%M UTC")

        plot_obs = all_obs[np.where(all_obs['time'] == time)]

        inflow_idxs = np.where((plot_obs['wind_dir'] >= inflow_wd_lbound)
                               & (plot_obs['wind_dir'] <= inflow_wd_ubound))[0]
        outflow_idxs = np.array([
            idx for idx in range(plot_obs['id'].shape[0])
            if idx not in inflow_idxs
        ])

        title = "All MM observations at %s" % time_str
        file_name = "mm_obs_%06d.png" % (time - _initial_time)

        plotObservations(plot_obs,
                         map,
                         title,
                         file_name,
                         refl=refl_data[refl_time])
    return
Example #8
0
File: cov.py  Project: tsupinie/research
def main():
    """Compute and plot covariance and correlation fields between
    observation-point values and an ensemble model field, for every
    analysis time and every observation available at that time.
    """
    # Analysis times: every 5 min from t=11100 s to t=14400 s after 18 UTC.
    times_sec = range(11100, 14700, 300)
    run_base_time = datetime(2009, 6, 5, 18, 0, 0)
    times_dt = [ run_base_time + timedelta(seconds=t) for t in times_sec ]
    # KCYS radar site: elevation (m), latitude, longitude.
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)

    variables = [ 'pt' ]
    interp_height = 25

    # Two entries -> covariance between the pair; one entry -> the
    # auto-covariance of that single variable with itself.
    if len(variables) == 2:
        var1, var2 = variables
    else:
        var1 = variables[0]
        var2 = variables[0]

    map = Basemap(**proj)
    # NOTE(review): radar_x/radar_y are computed but not used below.
    radar_x, radar_y = map(radar_lon, radar_lat)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_da.pkl']
    all_obs = loadObs(obs_file_names, times_dt, map, sounding_obs=['soundings_da.pkl'])

    print all_obs

    forecast_base = "/caps1/tsupinie/1km-control-20120712/"
    grdbas_file_name = "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas"

    enf_files = glob.glob("%s/enf???.hdf0*" % forecast_base)

    # Ensemble forecast fields at every grid point (points=None).
    enf_data, ens_members, ens_times = loadAndInterpolateEnsemble(enf_files, variables, toRecArray, grdbas_file_name, 
        points=None, agl=True, wrap=False)

    # Ensemble-mean reflectivity at interp_height AGL, used as a plot overlay.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(enf_files, ['pt', 'p', 'qr', 'qs', 'qh'], computeReflectivity, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", 
        {'z':interp_height}, agl=True, wrap=False, aggregator=lambda x: np.mean(x, axis=0))

    # Coordinate axes both as stored (MSL heights) and converted to AGL.
    grdbas = nio.open_file(grdbas_file_name, mode='r', format='hdf')
    axes_msl = { 'z':grdbas.variables['zp'][:], 'y':grdbas.variables['y'][:], 'x':grdbas.variables['x'][:] }
    axes_agl = { 'z':_makeZCoordsAGL(grdbas.variables['zp'][:]), 'y':grdbas.variables['y'][:], 'x':grdbas.variables['x'][:] }

    for wdt, time_dt in enumerate(times_dt):
        print "Working on time %s" % str(time_dt)
        time_idxs = np.where(all_obs['time'] == (time_dt - datetime(1970, 1, 1, 0, 0, 0)).total_seconds())
        # Final flag selects covariance (False) vs. correlation (True).
        cov = loadAndComputeCovariance(all_obs[time_idxs], enf_data[var1][:, wdt], enf_data[var2][:, wdt], map, axes_msl, False)
        cor = loadAndComputeCovariance(all_obs[time_idxs], enf_data[var1][:, wdt], enf_data[var2][:, wdt], map, axes_msl, True)

        for ob_idx in xrange(cov.shape[0]):
            ob = all_obs[time_idxs[0][ob_idx]]
            interp_cov = interpolate(cov[ob_idx], axes_agl, { 'z':interp_height })
            interp_cor = interpolate(cor[ob_idx], axes_agl, { 'z':interp_height })

            # Radius of influence passed to plotCov: 300 for ASOS ("SA")
            # stations, an elevation-dependent ellipse for soundings, else 50.
            # Units presumably km -- confirm against plotCov.
            roi = 50
            if ob['obtype'] == "SA":
                roi = 300
            elif ob['obtype'] == "SNDG":
                obs_x, obs_y = map(ob['longitude'], ob['latitude'])
                interp_height_msl = interpolate(axes_msl['z'], axes_msl, { 'y':obs_y, 'x':obs_x }, wrap=True)[0]
                print interp_height_msl

                # Horizontal and vertical localization radii.
                r_h = 150.
                r_v = 6.

                # Shrink the horizontal radius with vertical separation; zero
                # it out entirely beyond the vertical radius.
                z_diff = (interp_height_msl - ob['elevation']) / 1000.
                if np.abs(z_diff) > r_v:
                    roi = 0
                else:
                    roi = r_h * np.sqrt(1 - (z_diff / r_v) ** 2)

                print roi
            print np.nanmin(interp_cov), np.nanmax(interp_cov)

            plotCov(interp_cov, refl_ens_mean[wdt], (ob['elevation'], ob['latitude'], ob['longitude']), map, goshen_1km_gs, "cov_%06d_ob%02d.png" % (times_sec[wdt], ob_idx), roi=roi, normalize=(-2.0, 2.0))
            plotCov(interp_cor, refl_ens_mean[wdt], (ob['elevation'], ob['latitude'], ob['longitude']), map, goshen_1km_gs, "cor_%06d_ob%02d.png" % (times_sec[wdt], ob_idx), roi=roi)

    return
Example #9
0
def main():
    """Plot CRPS time series for each verification variable, overlaying the
    three experiments on one figure per variable and annotating each time
    with the number of observations used.
    """
    exp_names = ["no-mm", "mm", "mod-05XP"]
    labels = {"no-mm": "No MM", "mm": "MM", "mod-05XP": "MM + MWR05XP"}
    parameters = ['t', 'td', 'u', 'v']
    param_names = {
        't': "Temperature",
        'td': "Dewpoint",
        'u': r'$u$ Wind',
        'v': r'$v$ Wind'
    }
    units = {
        't': r'$^{\circ}$F',
        'td': r'$^{\circ}$F',
        'u': r'm s$^{-1}$',
        'v': r'm s$^{-1}$'
    }

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    # Verification times: every 5 min from t=14700 s to t=18000 s after 18 UTC.
    times = np.arange(14700, 18300, 300)
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    dt_times = [base_time + timedelta(seconds=int(t)) for t in times]
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()

    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_clip.pkl'
    ]
    all_obs = loadObs(obs_file_names,
                      dt_times,
                      map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      sounding_obs=['soundings_clip.pkl'])

    # Number of observations available at each verification time; used to
    # annotate the plots below.
    ob_nums = []
    for t in times:
        obs_idxs = np.where(all_obs['time'] == base_epoch + t)[0]
        ob_nums.append(len(obs_idxs))

    # One figure and axes per variable, created up front so the experiment
    # loop below can draw onto all of them.
    figures = dict([(p, pylab.figure()) for p in parameters])
    axes = {}
    for p in parameters:
        pylab.figure(figures[p].number)
        axes[p] = pylab.axes((0.09, 0.12, 0.82, 0.8))

    # Overlay each experiment's CRPS curve on each variable's figure.
    for name in exp_names:
        crps = cPickle.load(open("%s_crps.pkl" % name, 'r'))
        for param in parameters:
            pylab.figure(figures[param].number)
            pylab.sca(axes[param])

            pylab.plot(times, crps[param], label=labels[name])

    for param in parameters:
        pylab.figure(figures[param].number)

        # Blend data x-coordinates with axes y-coordinates so the ob-count
        # labels sit at a fixed height regardless of the y-limits.
        num_label_trans = transforms.blended_transform_factory(
            pylab.gca().transData,
            pylab.gca().transAxes)

        for t, n_obs in zip(times, ob_nums):
            pylab.text(t,
                       0.025,
                       "%d" % n_obs,
                       weight='bold',
                       style='italic',
                       size='xx-large',
                       transform=num_label_trans,
                       ha='center',
                       bbox={
                           'facecolor': '#ffffff',
                           'alpha': 0.7
                       })

        pylab.xlim(times.min(), times.max())
        lb_y, ub_y = pylab.ylim()
        pylab.ylim(0, ub_y)  # pin the y-axis bottom at zero

        pylab.xlabel("Time (UTC)", size='large')
        pylab.ylabel("CRPS (%s)" % units[param], size='large')

        pylab.xticks(times,
                     [(base_time + timedelta(seconds=int(t))).strftime("%H%M")
                      for t in times],
                     rotation=30.,
                     size='large')
        pylab.yticks(size='large')

        pylab.legend(loc=1)
        pylab.suptitle("CRPS for %s" % param_names[param])
        pylab.savefig("all_crps_%s.png" % param)
        pylab.close()
    return
Example #10
0
def main():
    """Plot CRPS time series for each verification variable, combining the
    three experiments on a single figure per variable and annotating each
    time with the number of observations used.
    """
    exp_names = ["no-mm", "mm", "mod-05XP"]
    labels = {"no-mm": "No MM", "mm": "MM", "mod-05XP": "MM + MWR05XP"}
    parameters = ['t', 'td', 'u', 'v']
    param_names = {'t': "Temperature", 'td': "Dewpoint",
                   'u': r'$u$ Wind', 'v': r'$v$ Wind'}
    units = {'t': r'$^{\circ}$F', 'td': r'$^{\circ}$F',
             'u': r'm s$^{-1}$', 'v': r'm s$^{-1}$'}

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    times = np.arange(14700, 18300, 300)
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    dt_times = [base_time + timedelta(seconds=int(t)) for t in times]
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, dt_times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])

    # How many observations are available at each verification time.
    ob_nums = [len(np.where(all_obs['time'] == base_epoch + t)[0]) for t in times]

    # One figure and axes per variable, created up front so the experiment
    # loop can draw onto all of them.
    figures = {}
    axes = {}
    for var in parameters:
        figures[var] = pylab.figure()
        axes[var] = pylab.axes((0.09, 0.12, 0.82, 0.8))

    # Overlay each experiment's CRPS curve on every variable's figure.
    for exp in exp_names:
        crps = cPickle.load(open("%s_crps.pkl" % exp, 'r'))
        for var in parameters:
            pylab.figure(figures[var].number)
            pylab.sca(axes[var])
            pylab.plot(times, crps[var], label=labels[exp])

    for var in parameters:
        pylab.figure(figures[var].number)

        # Blend data x-coordinates with axes y-coordinates so the ob-count
        # labels sit at a fixed height regardless of the y-limits.
        count_trans = transforms.blended_transform_factory(pylab.gca().transData, pylab.gca().transAxes)

        for t, count in zip(times, ob_nums):
            pylab.text(t, 0.025, "%d" % count,
                       weight='bold', style='italic', size='xx-large',
                       transform=count_trans, ha='center',
                       bbox={'facecolor':'#ffffff', 'alpha':0.7})

        pylab.xlim(times.min(), times.max())
        _, y_top = pylab.ylim()
        pylab.ylim(0, y_top)  # pin the y-axis bottom at zero

        pylab.xlabel("Time (UTC)", size='large')
        pylab.ylabel("CRPS (%s)" % units[var], size='large')

        tick_labels = [(base_time + timedelta(seconds=int(t))).strftime("%H%M") for t in times]
        pylab.xticks(times, tick_labels, rotation=30., size='large')
        pylab.yticks(size='large')

        pylab.legend(loc=1)
        pylab.suptitle("CRPS for %s" % param_names[var])
        pylab.savefig("all_crps_%s.png" % var)
        pylab.close()
    return
Example #11
0
def main():
    """Verify an ensemble forecast against surface and sounding observations:
    for each verification time and variable, compute CRPS (with its
    alpha/beta decomposition), rank histograms, and outlier frequencies,
    broken down by region (inflow / outflow / sounding); pickle the results
    and plot the verification graphs.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    # Verification times: every 5 min from t=14700 s to t=18000 s after 18 UTC.
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    n_ensemble_members = 40
    exp_name = "zupdtpt"

    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    #
    # Load and thin all the observed data
    #
    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_clip.pkl'
    ]
    all_obs = loadObs(obs_file_names,
                      times,
                      map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]

    # The first character of the station id distinguishes the network (see
    # the print statements below): 'P' = NSSL MM, '1'/'2' = TTU Sticknet,
    # 'K' = ASOS.
    ob_first_char = np.array([id[0] for id in list(all_obs['id'])])

    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(
        np.where((ob_first_char == "1") | (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs

    # NOTE(review): all_times is computed but not used below.
    all_times = [
        datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t)
        for t in all_obs['time']
    ]

    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    # Vertical coordinate for the interpolation; used with coords='pres'
    # below, so presumably pressure converted hPa -> Pa -- confirm.
    obs_z = all_obs['pres'] * 100

    def getObsData(**kwargs):
        # Bundle the raw model fields needed for verification into a single
        # record array (one record per observation point).
        obs = np.empty(kwargs['pt'].shape,
                       dtype=[('u', np.float32), ('v', np.float32),
                              ('pt', np.float32), ('p', np.float32),
                              ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']

        return obs

    # Verified variables, with the functions that derive each one from the
    # ensemble record (ens_funcs) and from the observation record (obs_funcs).
    obs_vars = ['u', 'v', 't', 'td']
    ens_funcs = {'u': uFromU, 'v': vFromV, 't': tempFromPt, 'td': dewpFromQv}
    obs_funcs = {
        'u': uFromWind,
        'v': vFromWind,
        't': tempFromT,
        'td': dewpFromTd
    }

    # Accumulators, keyed by variable and (inside) by region.
    avg_crps_values = {}
    all_crps_values = {}
    rank_histograms = {}
    all_alphas = {}
    all_betas = {}
    high_outliers = {}
    low_outliers = {}

    for time_sec, time in zip(times_seconds, times):
        #       files = glob.glob("/caps2/tsupinie/1kmf-%s/ena???.hdf%06d" % (exp_name, time_sec))

        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())

        #
        # Load all the ensemble members and interpolate them to the observation points.  Because of the design of my script, I'm
        # loading the all the members timestep-by-timestep, but there's no reason you can't load them all at once.  See the function
        # definition for the meaning of all the arguments.
        #
        #       ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps2/tsupinie/1kmf-%s/ena001.hdfgrdbas" % exp_name,
        #           {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True, coords='pres')

        ens_obs = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name,
                               n_ensemble_members, [time_sec],
                               (['u', 'v', 'pt', 'p', 'qv'], getObsData), {
                                   'z': obs_z[time_idxs],
                                   'y': obs_y[time_idxs],
                                   'x': obs_x[time_idxs]
                               },
                               agl=False,
                               wrap=True,
                               coords='pres')

        #       print ens_obs

        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []

            # Derive the verified quantity from the ensemble records and from
            # the observation records for this time.
            ens_ob_var = ens_funcs[ob_var](
                **dict([(n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names]))
            obs = obs_funcs[ob_var](**dict([(n, all_obs[n][time_idxs])
                                            for n in all_obs.dtype.names]))

            # Lazily initialize the per-region accumulators the first time
            # each variable is seen.
            if ob_var not in rank_histograms:
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}

                for region in ['inflow', 'outflow', 'sounding']:
                    # One histogram bin per possible rank (n_members + 1).
                    rank_histograms[ob_var][region] = np.zeros(
                        (ens_obs.shape[0] + 1, ), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []

            for idx in xrange(obs.shape[-1]):
                # Rank of the observation within the sorted ensemble, CRPS
                # decomposition, and whether the ob falls outside the envelope.
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])

                # Attribute this observation to its region: inflow/outflow by
                # station membership at this time, or 'sounding' by obtype.
                for region in ['inflow', 'outflow', 'sounding']:
                    if region in inflow_stations[time_sec] and all_obs['id'][
                            time_idxs][idx] in inflow_stations[time_sec][
                                region]:
                        #                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Surface %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sfc_%s_%03d.png" % (ob_var, idx))

                        rank_histograms[ob_var][region][rank_idx] += 1

                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                    elif region == "sounding" and all_obs['obtype'][time_idxs][
                            idx] == "SNDG":
                        #                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Sounding %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sndg_%s_%03d.png" % (ob_var, idx))

                        rank_histograms[ob_var][region][rank_idx] += 1

                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                time_crps_values.append(crps)

            # Mean CRPS over all observations at this time, appended to the
            # per-variable time series (created on first use).
            try:
                avg_crps_values[ob_var].append(
                    sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [
                    sum(time_crps_values) / len(time_crps_values)
                ]

    def dictmean(D):
        # Mean over the concatenation of all the dictionary's lists.
        all_lists = []
        for val in D.itervalues():
            all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Elementwise sum of the dictionary's values (e.g. region histograms).
        all_lists = []
        for val in D.itervalues():
            all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        # 0755 is the Python 2 octal literal for rwxr-xr-x.
        os.mkdir("images-%s" % exp_name, 0755)

    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values, open("%s_crps_breakdown.pkl" % exp_name,
                                       'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers),
                 open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)

    # Plot the verification graphs: once aggregated over all regions, then
    # once per region.
    for ob_var in obs_vars:
        total_obs = sum([len(v) for v in high_outliers[ob_var].itervalues()])
        print total_obs
        createVerificationGraphs(
            dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]),
            dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]),
            dictsum(rank_histograms[ob_var]).astype(float) / total_obs,
            total_obs, "%s" % ob_var, exp_name)

        for region in ['inflow', 'outflow', 'sounding']:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(
                mean(all_alphas[ob_var][region]),
                mean(all_betas[ob_var][region]),
                mean(high_outliers[ob_var][region]),
                mean(low_outliers[ob_var][region]),
                rank_histograms[ob_var][region].astype(float) / region_obs,
                region_obs, suffix, exp_name)

        # Time series of the all-region mean CRPS.
        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])

        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
Example #12
0
def main():
    """Plot KCYS reflectivity with inflow/outflow station locations and wind
    barbs for each analysis time in ``inflow_stations``, one PNG per time."""
    # Model start and the Unix epoch; obs times are seconds since epoch,
    # analysis times are seconds since model start.
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=sec) for sec in times_seconds]

    # Subdomain (y, x) slices; HDF 2D fields are indexed (record, y, x).
    bounds = (slice(100, 180), slice(90, 170))
    rev_bounds = (0,) + bounds[::-1]

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds)
    map = Basemap(**proj)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array([int(fname[-6:]) for fname in glob.glob("%s??????" % refl_base)])
    refl_keep_times = []
    refl_data = {}

    for tt in times_seconds:
        # Pick the nearest radar time; step back one file if it overshoots tt.
        nearest = np.argmin(np.abs(refl_times - tt))
        if nearest > 0 and refl_times[nearest] > tt:
            nearest -= 1

        file_name = "%s%06d" % (refl_base, refl_times[nearest])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[nearest])
        refl_data[tt] = hdf.variables['refl2d'][rev_bounds]

    for time, reg in inflow_stations.iteritems():
        pylab.figure()

        gs_x, gs_y = goshen_1km_gs
        nx, ny = refl_data[time].shape
        xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))
        pylab.contourf(xs, ys, refl_data[time], levels=np.arange(10, 80, 10))

        for region, stations in reg.iteritems():
            if region == 'sounding':
                continue

            for station in stations:
                # All obs for this station at this analysis time (epoch secs).
                idxs = np.where((all_obs['id'] == station) & (all_obs['time'] == base_epoch + time))
                ob_xs, ob_ys = map(all_obs['longitude'][idxs], all_obs['latitude'][idxs])

                if region == 'inflow':
                    color = 'r'
                elif region == 'outflow':
                    color = 'b'

                wdir = all_obs['wind_dir'][idxs]
                wspd = all_obs['wind_spd'][idxs]
                # Meteorological direction -> u/v components; the 1.94 factor
                # presumably converts m/s to knots for the barbs.
                u = -wspd * np.sin(wdir * np.pi / 180.) * 1.94
                v = -wspd * np.cos(wdir * np.pi / 180.) * 1.94

                pylab.plot(ob_xs, ob_ys, "%so" % color)
                pylab.barbs(ob_xs, ob_ys, u, v)

        drawPolitical(map, scale_len=10)

        pylab.savefig("inflow_stations_%06d.png" % time)
        pylab.close()

    return
예제 #13
0
def main():
    """Plot KCYS reflectivity with inflow/outflow station locations and wind
    barbs for each analysis time in ``inflow_stations``; one PNG per time.
    (Reformatted duplicate of the preceding example.)
    """
    # Model start time and the Unix epoch; obs times are seconds since epoch,
    # analysis times are seconds since model start.
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    base_epoch = (base_time - epoch).total_seconds()
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    # Subdomain (y, x) slices; rev_bounds indexes HDF arrays as
    # (record 0, y slice, x slice).
    bounds = (slice(100, 180), slice(90, 170))
    rev_bounds = [0]
    rev_bounds.extend(bounds[::-1])
    rev_bounds = tuple(rev_bounds)

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs, bounds=bounds)
    map = Basemap(**proj)

    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_clip.pkl'
    ]
    all_obs = loadObs(obs_file_names,
                      times,
                      map,
                      (goshen_1km_proj['width'], goshen_1km_proj['height']),
                      sounding_obs=['soundings_clip.pkl'])

    refl_base = "hdf/KCYS/1km/goshen.hdfrefl2d"
    refl_times = np.array(
        [int(f[-6:]) for f in glob.glob("%s??????" % refl_base)])
    refl_keep_times = []
    refl_data = {}

    for tt in times_seconds:
        # Nearest available radar time; step back one file if it overshoots tt.
        idx = np.argmin(np.abs(refl_times - tt))
        if refl_times[idx] > tt and idx > 0:
            idx -= 1

        file_name = "%s%06d" % (refl_base, refl_times[idx])
        hdf = nio.open_file(file_name, mode='r', format='hdf')
        refl_keep_times.append(refl_times[idx])
        refl_data[tt] = hdf.variables['refl2d'][rev_bounds]

    for time, reg in inflow_stations.iteritems():
        pylab.figure()

        gs_x, gs_y = goshen_1km_gs
        nx, ny = refl_data[time].shape
        xs, ys = np.meshgrid(gs_x * np.arange(nx), gs_y * np.arange(ny))
        pylab.contourf(xs, ys, refl_data[time], levels=np.arange(10, 80, 10))

        for region, stations in reg.iteritems():
            if region != 'sounding':
                for station in stations:
                    # All obs for this station at this analysis time.
                    idxs = np.where((all_obs['id'] == station)
                                    & (all_obs['time'] == base_epoch + time))
                    ob_xs, ob_ys = map(all_obs['longitude'][idxs],
                                       all_obs['latitude'][idxs])

                    if region == 'inflow': color = 'r'
                    elif region == 'outflow': color = 'b'

                    wdir = all_obs['wind_dir'][idxs]
                    wspd = all_obs['wind_spd'][idxs]
                    # Direction/speed -> u/v components; 1.94 presumably
                    # converts m/s to knots for the barbs.
                    u = -wspd * np.sin(wdir * np.pi / 180.) * 1.94
                    v = -wspd * np.cos(wdir * np.pi / 180.) * 1.94

                    pylab.plot(ob_xs, ob_ys, "%so" % color)
                    pylab.barbs(ob_xs, ob_ys, u, v)

        drawPolitical(map, scale_len=10)

        pylab.savefig("inflow_stations_%06d.png" % time)
        pylab.close()

    return
예제 #14
0
def main():
    """Build cold-pool verification pickles for one experiment.

    Loads the ensemble, computes the ensemble-mean derived fields over the
    full domain, and interpolates every member to each surface observation's
    location and nominal time; both products are written with cPickle for
    later comparison plotting.
    """
    np.seterr(all='ignore')
    ap = argparse.ArgumentParser()
    ap.add_argument('--data-path', dest='data_path', default="/caps2/tsupinie/")
    ap.add_argument('--exp-name', dest='exp_name', required=True)

    args = ap.parse_args()

    exp_base = args.data_path
    exp_name = args.exp_name
    n_ensemble_members = 40

    temp = goshen_1km_temporal(start=14400)

    # Analysis subdomain (y, x) slices.
    bounds = (slice(100, 180), slice(90, 170))
    grid = goshen_1km_grid(bounds=bounds)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid, grid.getWidthHeight())

    # Observation locations in grid (x, y) coordinates.
    obs_x, obs_y = grid(all_obs['longitude'], all_obs['latitude'])

    grdbas_file = "%s/1kmf-%s/ena001.hdfgrdbas" % (exp_base, exp_name)
    grdbas = nio.open_file(grdbas_file, mode='r', format='hdf')
    y_axis = decompressVariable(grdbas.variables['y'])[bounds[1]]
    x_axis = decompressVariable(grdbas.variables['x'])[bounds[0]]

    # Shift axes so the subdomain origin is at zero, matching the obs x/y.
    y_axis = y_axis - y_axis[0]
    x_axis = x_axis - x_axis[0]

    ens = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, temp.getTimes(), (['u', 'v', 'pt', 'p', 'qv', 'qr', 'qs', 'qh'], getTempDewpRefl), {'sigma':2}, agl=True, wrap=True)

    # Slice spec to cut the full-domain ensemble down to the subdomain;
    # trailing spatial axes are (y, x), hence bounds reversed.
    ens_slice = [ slice(None), slice(None) ]
    ens_slice.extend(bounds[::-1])

    # Ensemble mean of each derived variable over the full domain.
    ens_mean = np.empty(ens.shape[1:], dtype=[('t', np.float32), ('td', np.float32), ('u', np.float32), ('v', np.float32)])
    for var in ens_mean.dtype.fields.iterkeys():
        ens_mean[var] = ens[var].mean(axis=0)

    cPickle.dump(ens_mean, open("cold_pool_1kmf-%s.pkl" % exp_name, 'w'), -1)

    ens = ens[tuple(ens_slice)]

    # Interpolate each member to each observation's (x, y) location at the
    # observation's nominal time.
    ens_obs = np.empty(ens.shape[:1] + all_obs.shape, dtype=ens_mean.dtype)

    for lde in xrange(ens.shape[0]):
        for var in ens_obs.dtype.fields.iterkeys():
            for ob_idx, (ob_x, ob_y) in enumerate(zip(obs_x, obs_y)):
                wdt = temp.getEpochs(aslist=True).index(int(all_obs['nom_time'][ob_idx]))
                # Bug fix: was {'y':ob_y, 'x':ob_y}, which passed the y
                # coordinate for 'x' and sampled the wrong point.
                ens_obs[var][lde, ob_idx] = interpolate(ens[var][lde, wdt, np.newaxis], {'y':y_axis, 'x':x_axis}, {'y':ob_y, 'x':ob_x})

    # Ensemble spread and mean at the observation points.
    ens_obs_std = np.empty(ens_obs.shape[1:], dtype=ens_obs.dtype)
    ens_obs_mean = np.empty(ens_obs.shape[1:], dtype=ens_obs.dtype)

    for var in ens_obs_std.dtype.fields.iterkeys():
        ens_obs_std[var] = ens_obs[var].std(ddof=1, axis=0)
        ens_obs_mean[var] = ens_obs[var].mean(axis=0)

    cPickle.dump(ens_obs_mean, open("cold_pool_obs_1kmf-%s.pkl" % exp_name, 'w'), -1)
    return
예제 #15
0
def main():
    """Plot time-series "swaths" of ensemble-forecast soundings against the
    observed radiosondes: every member's temperature/dewpoint (top panel) and
    pressure (bottom panel) versus time, one figure per sonde ID.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [ base_time + timedelta(seconds=t) for t in times_seconds ]

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    sounding_obs = loadObs(['soundings.pkl'], times, map, sounding_obs=['soundings.pkl'])

    obs_x, obs_y = map(sounding_obs['longitude'], sounding_obs['latitude'])
    obs_z = sounding_obs['elevation']

    # Earliest 300-s forecast time covered by the soundings, in
    # model-relative seconds.
    start_time = floor(sounding_obs['time'].min() / 300) * 300 - (base_time - epoch).total_seconds()
    sonde_ids = np.unique1d(sounding_obs['id'])

    # Per-sonde accumulators of observed records and ensemble values.
    sondes = {}
    for id in sonde_ids:
        sondes[id] = {'obs':[], 'ens':[] }

    for time in times_seconds[times_seconds.index(start_time):]:
        time_epoch = time + (base_time - epoch).total_seconds()
#       time_base = (epoch + timedelta(seconds=time) - base_time).total_seconds()
        files = glob.glob("/caps1/tsupinie/1km-control-20120712/ena???.hdf%06d" % time)

        # Assign obs to the nearest 300-s forecast time (i.e. within +/-150 s).
        round_times = np.round(sounding_obs['time'] / 300) * 300
        time_idxs = np.where(round_times == time_epoch)

        ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", 
            {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True)
        
        # Reorder axes so the leading axis indexes observation points
        # (assumed from the per-ob indexing below -- TODO confirm).
        ens_obs = np.transpose(ens_obs, axes=(2, 0, 1))

        for sonde in sonde_ids:
            sonde_idxs = np.where(sounding_obs['id'][time_idxs] == sonde)

            sondes[sonde]['obs'].extend(sounding_obs[time_idxs[0][sonde_idxs]])
            sondes[sonde]['ens'].extend([ e[:,0] for e in ens_obs[sonde_idxs] ])
    
    for sonde in sonde_ids:
        # Stack the accumulated per-ob member values into one record array.
        ens_obs = np.array(sondes[sonde]['ens'], dtype=sondes[sonde]['ens'][0].dtype)
        ens_temp = theta2Temperature(pt=ens_obs['pt'], p=ens_obs['p'])
        ens_dewp = qv2Dewpoint(qv=ens_obs['qv'], p=ens_obs['p'])

        data_obs = np.array(sondes[sonde]['obs'], dtype=sondes[sonde]['obs'][0].dtype)
        order = np.argsort(data_obs['time'])

        # Observed traces: model-relative time, temperatures in Kelvin.
        time = data_obs['time'][order] - (base_time - epoch).total_seconds()
        obs_temp = data_obs['temp'][order] + 273.15
        obs_dewp = data_obs['dewp'][order] + 273.15

#       pylab.figure(figsize=(8, 10), dpi=100)
#       pylab.axes((0, 0, 1, 1))

        pylab.figure()

        # Thin red/green/blue lines: individual members.
        for ens in xrange(ens_obs.shape[1]):
#           plotSounding(None, t=ens_temp[:, ens][order], td=ens_dewp[:, ens][order], p=ens_obs['p'][:, ens][order] / 100., u=ens_obs['u'][:, ens][order], v=ens_obs['v'][:, ens][order])
            pylab.subplot(211)
            pylab.plot(time, ens_temp[:, ens][order], 'r-', linewidth=0.5)
            pylab.plot(time, ens_dewp[:, ens][order], 'g-', linewidth=0.5)

            pylab.subplot(212)
            pylab.plot(time, ens_obs['p'][:, ens][order] / 100., 'b-', linewidth=0.5)

        # Thick black lines: the observed sounding.
#       plotSounding(None, t=obs_temp, td=obs_dewp, p=data_obs['pres'][order], u=np.ones(order.shape), v=np.zeros(order.shape))
        pylab.subplot(211)
        pylab.plot(time, obs_temp, 'k-', linewidth=1.0)
        pylab.plot(time, obs_dewp, 'k-', linewidth=1.0)

        pylab.subplot(212)
        pylab.plot(time, data_obs['pres'][order], 'k-', linewidth=1.0)

        # Sonde IDs may contain '/'; sanitize for the file name.
        sonde_name = sonde.replace('/', '_')
        pylab.savefig("sonde_swath_%s.png" % sonde_name)

        pylab.close()       
    return
예제 #16
0
def main():
    """Compute and plot observation-space ensemble covariance/correlation.

    For each DA-window time, computes the ensemble covariance and correlation
    between every observation and the model field(s) in ``variables``, then
    plots each at ``interp_height`` (AGL) over the ensemble-mean reflectivity,
    with a per-observation radius of influence.
    """
    times_sec = range(11100, 14700, 300)
    run_base_time = datetime(2009, 6, 5, 18, 0, 0)
    times_dt = [run_base_time + timedelta(seconds=t) for t in times_sec]
    radar_elev, radar_lat, radar_lon = 1883, 41.151944, -104.806111
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)

    variables = ['pt']
    interp_height = 25

    # One variable -> auto-covariance; two -> cross-covariance between them.
    if len(variables) == 2:
        var1, var2 = variables
    else:
        var1 = variables[0]
        var2 = variables[0]

    map = Basemap(**proj)
    radar_x, radar_y = map(radar_lon, radar_lat)

    obs_file_names = [
        'psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl',
        'soundings_da.pkl'
    ]
    all_obs = loadObs(obs_file_names,
                      times_dt,
                      map,
                      sounding_obs=['soundings_da.pkl'])

    print all_obs

    forecast_base = "/caps1/tsupinie/1km-control-20120712/"
    grdbas_file_name = "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas"

    enf_files = glob.glob("%s/enf???.hdf0*" % forecast_base)

    enf_data, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files,
        variables,
        toRecArray,
        grdbas_file_name,
        points=None,
        agl=True,
        wrap=False)

    # Ensemble-mean reflectivity at interp_height, used as the plot underlay.
    refl_ens_mean, ens_refl, ens_members, ens_times = loadAndInterpolateEnsemble(
        enf_files, ['pt', 'p', 'qr', 'qs', 'qh'],
        computeReflectivity,
        "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas",
        {'z': interp_height},
        agl=True,
        wrap=False,
        aggregator=lambda x: np.mean(x, axis=0))

    # Grid coordinate axes, with the vertical both MSL and AGL.
    grdbas = nio.open_file(grdbas_file_name, mode='r', format='hdf')
    axes_msl = {
        'z': grdbas.variables['zp'][:],
        'y': grdbas.variables['y'][:],
        'x': grdbas.variables['x'][:]
    }
    axes_agl = {
        'z': _makeZCoordsAGL(grdbas.variables['zp'][:]),
        'y': grdbas.variables['y'][:],
        'x': grdbas.variables['x'][:]
    }

    for wdt, time_dt in enumerate(times_dt):
        print "Working on time %s" % str(time_dt)
        time_idxs = np.where(
            all_obs['time'] == (time_dt -
                                datetime(1970, 1, 1, 0, 0, 0)).total_seconds())
        # Last argument selects covariance (False) vs. correlation (True).
        cov = loadAndComputeCovariance(all_obs[time_idxs], enf_data[var1][:,
                                                                          wdt],
                                       enf_data[var2][:, wdt], map, axes_msl,
                                       False)
        cor = loadAndComputeCovariance(all_obs[time_idxs], enf_data[var1][:,
                                                                          wdt],
                                       enf_data[var2][:, wdt], map, axes_msl,
                                       True)

        for ob_idx in xrange(cov.shape[0]):
            ob = all_obs[time_idxs[0][ob_idx]]
            interp_cov = interpolate(cov[ob_idx], axes_agl,
                                     {'z': interp_height})
            interp_cor = interpolate(cor[ob_idx], axes_agl,
                                     {'z': interp_height})

            # Radius of influence by observation type (units presumably km
            # -- TODO confirm against plotCov).
            roi = 50
            if ob['obtype'] == "SA":
                roi = 300
            elif ob['obtype'] == "SNDG":
                # Soundings: shrink the horizontal ROI elliptically as the
                # plot level gets farther from the ob's height; zero beyond
                # the vertical radius r_v.
                obs_x, obs_y = map(ob['longitude'], ob['latitude'])
                interp_height_msl = interpolate(axes_msl['z'],
                                                axes_msl, {
                                                    'y': obs_y,
                                                    'x': obs_x
                                                },
                                                wrap=True)[0]
                print interp_height_msl

                r_h = 150.
                r_v = 6.

                z_diff = (interp_height_msl - ob['elevation']) / 1000.
                if np.abs(z_diff) > r_v:
                    roi = 0
                else:
                    roi = r_h * np.sqrt(1 - (z_diff / r_v)**2)

                print roi
            print np.nanmin(interp_cov), np.nanmax(interp_cov)

            plotCov(interp_cov,
                    refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map,
                    goshen_1km_gs,
                    "cov_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi,
                    normalize=(-2.0, 2.0))
            plotCov(interp_cor,
                    refl_ens_mean[wdt],
                    (ob['elevation'], ob['latitude'], ob['longitude']),
                    map,
                    goshen_1km_gs,
                    "cor_%06d_ob%02d.png" % (times_sec[wdt], ob_idx),
                    roi=roi)

    return
예제 #17
0
File: crps.py  Project: tsupinie/research
def main():
    """CRPS/rank-histogram verification of the ensemble against observations.

    For each forecast time, interpolates all members to the observation
    points, then accumulates CRPS (and its alpha/beta decomposition), outlier
    indicators, and rank histograms per variable and per region (inflow,
    outflow, sounding).  Results are pickled and summary graphs produced.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [ base_time + timedelta(seconds=t) for t in times_seconds ]

    n_ensemble_members = 40
    exp_name = "zupdtpt"

    #
    # Set up the basemap grid
    #
    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    map = Basemap(**proj)

    #
    # Load and thin all the observed data
    #
    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl', 'soundings_clip.pkl']
    all_obs = loadObs(obs_file_names, times, map, (goshen_1km_proj['width'], goshen_1km_proj['height']), sounding_obs=['soundings_clip.pkl'])
    print all_obs.shape[0]

    # Count obs by network, keyed off the first character of the station ID.
    ob_first_char = np.array([ id[0] for id in list(all_obs['id']) ])

    num_psu_obs = len(np.where(ob_first_char == "P")[0])
    num_ttu_obs = len(np.where((ob_first_char == "1") | (ob_first_char == "2"))[0])
    num_asos_obs = len(np.where((ob_first_char == "K"))[0])
    num_sndg_obs = len(np.where(all_obs['obtype'] == "SNDG")[0])
    print "Number of NSSL MM obs used:", num_psu_obs
    print "Number of TTU Sticknet obs used:", num_ttu_obs
    print "Number of ASOS obs used:", num_asos_obs
    print "Number of sounding obs used:", num_sndg_obs

    all_times = [ datetime(1970, 1, 1, 0, 0, 0) + timedelta(seconds=t) for t in all_obs['time'] ]

    #
    # Convert the latitude and longitude observations to x and y on the grid.
    #
    obs_x, obs_y = map(all_obs['longitude'], all_obs['latitude'])
    # Vertical coordinate is pressure in Pa (coords='pres' is passed below).
    obs_z = all_obs['pres'] * 100

    def getObsData(**kwargs): 
        # Pack the raw model fields into a single record array.
        obs = np.empty(kwargs['pt'].shape, dtype=[('u', np.float32), ('v', np.float32), ('pt', np.float32), ('p', np.float32), ('qv', np.float32)])
        obs['u'] = kwargs['u']
        obs['v'] = kwargs['v']
        obs['pt'] = kwargs['pt']
        obs['p'] = kwargs['p']
        obs['qv'] = kwargs['qv']

        return obs

    # Verified variables, with the functions deriving each from the model
    # fields (ens_funcs) and from the observation records (obs_funcs).
    obs_vars = ['u', 'v', 't', 'td']
    ens_funcs = { 'u':uFromU, 'v':vFromV, 't':tempFromPt, 'td':dewpFromQv }
    obs_funcs = { 'u':uFromWind, 'v':vFromWind, 't':tempFromT, 'td':dewpFromTd }

    # Accumulators keyed by variable (and, below, by region).
    avg_crps_values = { }
    all_crps_values = { }
    rank_histograms = { }
    all_alphas = { }
    all_betas = { }    
    high_outliers = { }
    low_outliers = { }

    for time_sec, time in zip(times_seconds, times):
#       files = glob.glob("/caps2/tsupinie/1kmf-%s/ena???.hdf%06d" % (exp_name, time_sec))

        time_idxs = np.where(all_obs['time'] == (time - epoch).total_seconds())

        #
        # Load all the ensemble members and interpolate them to the observation points.  Because of the design of my script, I'm
        # loading the all the members timestep-by-timestep, but there's no reason you can't load them all at once.  See the function
        # definition for the meaning of all the arguments.
        #
#       ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(files, ['u', 'v', 'pt', 'p', 'qv'], getObsData, "/caps2/tsupinie/1kmf-%s/ena001.hdfgrdbas" % exp_name, 
#           {'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs]}, agl=False, wrap=True, coords='pres')

        ens_obs = loadEnsemble("/caps2/tsupinie/1kmf-%s/" % exp_name, n_ensemble_members, [ time_sec ], (['u', 'v', 'pt', 'p', 'qv'], getObsData), { 'z':obs_z[time_idxs], 'y':obs_y[time_idxs], 'x':obs_x[time_idxs] }, agl=False, wrap=True, coords='pres')

#       print ens_obs

        #
        # All subsequent lines do the verification
        #
        for ob_var in obs_vars:
            time_crps_values = []

            # Derive this variable for all members (single time index 0) and
            # for the matching observations.
            ens_ob_var = ens_funcs[ob_var](**dict([ (n, ens_obs[n][:, 0]) for n in ens_obs.dtype.names ]))
            obs = obs_funcs[ob_var](**dict([ (n, all_obs[n][time_idxs]) for n in all_obs.dtype.names ]))

            # Lazily create the per-region accumulators on first encounter.
            if ob_var not in rank_histograms:
                rank_histograms[ob_var] = {}
                all_crps_values[ob_var] = {}
                all_alphas[ob_var] = {}
                all_betas[ob_var] = {}
                high_outliers[ob_var] = {}
                low_outliers[ob_var] = {}
               
                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    # n_members + 1 rank bins.
                    rank_histograms[ob_var][region] = np.zeros((ens_obs.shape[0] + 1,), dtype=int)
                    all_crps_values[ob_var][region] = []
                    all_alphas[ob_var][region] = []
                    all_betas[ob_var][region] = []
                    high_outliers[ob_var][region] = []
                    low_outliers[ob_var][region] = []

            for idx in xrange(obs.shape[-1]):
                # Rank of the ob within the sorted ensemble, CRPS with its
                # alpha/beta decomposition, and outlier indicators.
                rank_idx = binRank(ens_ob_var[:, idx], obs[idx])
                crps, alphas, betas = CRPS(ens_ob_var[:, idx], obs[idx])
                high_outlier = heaviside(ens_ob_var[:, idx].max() - obs[idx])
                low_outlier = heaviside(ens_ob_var[:, idx].min() - obs[idx])

                # Attribute this ob to its region(s) and accumulate.
                for region in [ 'inflow', 'outflow', 'sounding' ]:
                    if region in inflow_stations[time_sec] and all_obs['id'][time_idxs][idx] in inflow_stations[time_sec][region]:
#                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Surface %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sfc_%s_%03d.png" % (ob_var, idx))

                        rank_histograms[ob_var][region][rank_idx] += 1

                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                    elif region == "sounding" and all_obs['obtype'][time_idxs][idx] == "SNDG":
#                       plotCDFs(np.sort(ens_ob_var[:, idx]), obs[idx], "CDFs for Sounding %s Observation %d and Forecast" % (ob_var, idx), "crps_cdf_sndg_%s_%03d.png" % (ob_var, idx))

                        rank_histograms[ob_var][region][rank_idx] += 1

                        all_crps_values[ob_var][region].append(crps)
                        all_alphas[ob_var][region].append(alphas)
                        all_betas[ob_var][region].append(betas)
                        high_outliers[ob_var][region].append(high_outlier)
                        low_outliers[ob_var][region].append(low_outlier)

                time_crps_values.append(crps)

            # Time-mean CRPS over all obs at this time (list created lazily).
            try:
                avg_crps_values[ob_var].append(sum(time_crps_values) / len(time_crps_values))
            except KeyError:
                avg_crps_values[ob_var] = [ sum(time_crps_values) / len(time_crps_values) ]

    def dictmean(D):
        # Mean over all regions' entries pooled together.
        all_lists = []
        for val in D.itervalues(): all_lists.extend(val)
        return np.array(all_lists).mean(axis=0)

    def dictsum(D):
        # Element-wise sum across regions (used for rank histograms).
        all_lists = []
        for val in D.itervalues(): all_lists.append(val)
        return np.array(all_lists).sum(axis=0)

    def mean(L):
        return np.array(L).mean(axis=0)

    if not os.path.exists("images-%s" % exp_name):
        os.mkdir("images-%s" % exp_name, 0755)

    cPickle.dump(avg_crps_values, open("%s_crps.pkl" % exp_name, 'w'), -1)
    cPickle.dump(all_crps_values, open("%s_crps_breakdown.pkl" % exp_name, 'w'), -1)
    cPickle.dump((all_alphas, all_betas, high_outliers, low_outliers), open("%s_crps_pieces.pkl" % exp_name, 'w'), -1)

    for ob_var in obs_vars:
        # All-region graphs, then per-region graphs, then the CRPS time series.
        total_obs = sum([ len(v) for v in high_outliers[ob_var].itervalues()  ])
        print total_obs
        createVerificationGraphs(dictmean(all_alphas[ob_var]), dictmean(all_betas[ob_var]), dictmean(high_outliers[ob_var]), dictmean(low_outliers[ob_var]), dictsum(rank_histograms[ob_var]).astype(float) / total_obs, total_obs, "%s" % ob_var, exp_name)

        for region in [ 'inflow', 'outflow',  'sounding' ]:
            suffix = "%s_%s" % (ob_var, region)
            region_obs = len(high_outliers[ob_var][region])
            createVerificationGraphs(mean(all_alphas[ob_var][region]), mean(all_betas[ob_var][region]), mean(high_outliers[ob_var][region]), mean(low_outliers[ob_var][region]), rank_histograms[ob_var][region].astype(float) / region_obs, region_obs, suffix, exp_name)

        pylab.clf()
        pylab.plot(times_seconds, avg_crps_values[ob_var])

        pylab.savefig("crps_avg_%s.png" % ob_var)
    return
예제 #18
0
def main():
    base_path = "/caps2/tsupinie/"
    ap = argparse.ArgumentParser()
    ap.add_argument('--exp-name', dest='exp_name', required=True)

    args = ap.parse_args()

    n_ens_members = 40
    exp_name = args.exp_name

    bounds_obs = (slice(100, 180), slice(90, 170))
    grid_obs = goshen_1km_grid(bounds=bounds_obs)

    bounds = (slice(None), slice(None))
    grid = goshen_1km_grid(bounds=bounds)

    temp = goshen_1km_temporal(start=14400)

    obs_file_names = ['psu_straka_mesonet.pkl', 'ttu_sticknet.pkl', 'asos.pkl']
    all_obs = loadObs(obs_file_names, temp.getDatetimes(aslist=True), grid_obs,
                      grid_obs.getWidthHeight())
    obs_xy = np.vstack(grid(all_obs['longitude'], all_obs['latitude'])).T

    ens = loadEnsemble("/caps2/tsupinie/%s/" % exp_name,
                       n_ens_members,
                       temp.getTimes(),
                       (['u', 'v', 'pt', 'p', 'qv'], getTempDewpRefl),
                       {'sigma': 2},
                       agl=True,
                       wrap=True)

    grid_xs, grid_ys = grid.getXY()
    obs_t_verif = []
    for wdt, (time_sec, time_epoch) in enumerate(zip(temp, temp.getEpochs())):

        try:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" %
                                  (base_path, exp_name, time_sec))
        except AssertionError:
            mo = ARPSModelObsFile("%s/%s/KCYSan%06d" %
                                  (base_path, exp_name, time_sec),
                                  mpi_config=(2, 12))
        except:
            print "Can't load reflectivity ..."
            mo = {'Z': np.zeros((1, 255, 255), dtype=np.float32)}

        time_ob_idxs = np.where(all_obs['nom_time'] == time_epoch)[0]

        time_obs = all_obs[time_ob_idxs]
        time_obs_xy = obs_xy[time_ob_idxs]

        obs_intrp = griddata(time_obs_xy,
                             5. / 9. * (time_obs['temp'] - 32) + 273.15,
                             (grid_xs, grid_ys))
        print np.isfinite(obs_intrp).sum()

        pylab.figure()

        pylab.contourf(grid_xs,
                       grid_ys,
                       ens['t'][:, wdt].mean(axis=0)[bounds] - obs_intrp,
                       levels=np.arange(-6, 6.5, 0.5),
                       cmap=matplotlib.cm.get_cmap("RdBu_r"))
        pylab.colorbar()

        pylab.contour(grid_xs,
                      grid_ys,
                      mo['Z'][0][tuple(reversed(bounds))],
                      levels=np.arange(10, 80, 10),
                      colors='k')

        grid.drawPolitical()

        pylab.savefig("obs_verif/obs_%s_t_grid_%06d.png" %
                      (exp_name[5:], time_sec))
        pylab.close()
        obs_t_verif.append(ens['t'][:, wdt].mean(axis=0) - obs_intrp)

    cPickle.dump(np.array(obs_t_verif),
                 open("obs_verif/obs_verif_%s.pkl" % exp_name, 'w'), -1)
    return
예제 #19
0
def main():
    """Plot time "swath" comparisons of ensemble members against observed
    soundings for the 5 June 2009 case.

    For each radiosonde site, ensemble fields (u, v, pt, p, qv) are
    interpolated to the observation locations at each 5-minute analysis
    time, converted to temperature/dewpoint, and plotted (thin colored
    traces, one per member) alongside the observed sounding (heavy black
    trace).  One figure per sonde is saved as sonde_swath_<id>.png.
    No arguments; returns None.
    """
    base_time = datetime(2009, 6, 5, 18, 0, 0)
    epoch = datetime(1970, 1, 1, 0, 0, 0)
    times_seconds = range(14700, 18300, 300)
    times = [base_time + timedelta(seconds=t) for t in times_seconds]

    proj = setupMapProjection(goshen_1km_proj, goshen_1km_gs)
    # Renamed from 'map' to avoid shadowing the builtin.
    map_proj = Basemap(**proj)

    sounding_obs = loadObs(['soundings.pkl'],
                           times,
                           map_proj,
                           sounding_obs=['soundings.pkl'])

    obs_x, obs_y = map_proj(sounding_obs['longitude'], sounding_obs['latitude'])
    obs_z = sounding_obs['elevation']

    # Round the earliest ob time down to the previous 5-minute analysis time,
    # expressed in seconds since the model base time.  int() keeps the value
    # an exact integer so times_seconds.index() below cannot miss on a float.
    start_time = int(floor(sounding_obs['time'].min() / 300) * 300 -
                     (base_time - epoch).total_seconds())
    # np.unique1d was deprecated in NumPy 1.4 and later removed; np.unique
    # is the drop-in replacement.
    sonde_ids = np.unique(sounding_obs['id'])

    sondes = dict((s_id, {'obs': [], 'ens': []}) for s_id in sonde_ids)

    for time_sec in times_seconds[times_seconds.index(start_time):]:
        time_epoch = time_sec + (base_time - epoch).total_seconds()
        files = glob.glob(
            "/caps1/tsupinie/1km-control-20120712/ena???.hdf%06d" % time_sec)

        # Match obs to this analysis time by rounding ob times to 5 minutes.
        round_times = np.round(sounding_obs['time'] / 300) * 300
        time_idxs = np.where(round_times == time_epoch)

        ens_obs, ens_members, ens_times = loadAndInterpolateEnsemble(
            files, ['u', 'v', 'pt', 'p', 'qv'],
            getObsData,
            "/caps1/tsupinie/1km-control-20120712/ena001.hdfgrdbas", {
                'z': obs_z[time_idxs],
                'y': obs_y[time_idxs],
                'x': obs_x[time_idxs]
            },
            agl=False,
            wrap=True)

        # Reorder axes so the leading dimension indexes observations
        # (presumably (n_obs, n_members, n_times) after the transpose --
        # TODO confirm against loadAndInterpolateEnsemble's return shape).
        ens_obs = np.transpose(ens_obs, axes=(2, 0, 1))

        for sonde_id in sonde_ids:
            sonde_idxs = np.where(sounding_obs['id'][time_idxs] == sonde_id)

            sondes[sonde_id]['obs'].extend(
                sounding_obs[time_idxs[0][sonde_idxs]])
            sondes[sonde_id]['ens'].extend(
                [e[:, 0] for e in ens_obs[sonde_idxs]])

    for sonde_id in sonde_ids:
        ens_obs = np.array(sondes[sonde_id]['ens'],
                           dtype=sondes[sonde_id]['ens'][0].dtype)
        ens_temp = theta2Temperature(pt=ens_obs['pt'], p=ens_obs['p'])
        ens_dewp = qv2Dewpoint(qv=ens_obs['qv'], p=ens_obs['p'])

        data_obs = np.array(sondes[sonde_id]['obs'],
                            dtype=sondes[sonde_id]['obs'][0].dtype)
        order = np.argsort(data_obs['time'])

        # Time axis in seconds since the model base time; observed temps are
        # in Celsius, so convert to Kelvin to match the ensemble values.
        time = data_obs['time'][order] - (base_time - epoch).total_seconds()
        obs_temp = data_obs['temp'][order] + 273.15
        obs_dewp = data_obs['dewp'][order] + 273.15

        pylab.figure()

        # Thin traces: one temperature/dewpoint (top) and pressure (bottom)
        # line per ensemble member.
        for ens_idx in xrange(ens_obs.shape[1]):
            pylab.subplot(211)
            pylab.plot(time, ens_temp[:, ens_idx][order], 'r-', linewidth=0.5)
            pylab.plot(time, ens_dewp[:, ens_idx][order], 'g-', linewidth=0.5)

            pylab.subplot(212)
            pylab.plot(time,
                       ens_obs['p'][:, ens_idx][order] / 100.,
                       'b-',
                       linewidth=0.5)

        # Heavy black traces for the observed sounding.
        pylab.subplot(211)
        pylab.plot(time, obs_temp, 'k-', linewidth=1.0)
        pylab.plot(time, obs_dewp, 'k-', linewidth=1.0)

        pylab.subplot(212)
        pylab.plot(time, data_obs['pres'][order], 'k-', linewidth=1.0)

        # Sonde ids may contain '/', which is illegal in a filename.
        sonde_name = sonde_id.replace('/', '_')
        pylab.savefig("sonde_swath_%s.png" % sonde_name)

        pylab.close()
    return