def create_M36_M09_lut():
    ''' Create a NN look-up table from the M09 to the M36 grid'''

    fout = '/Users/u0116961/data_sets/GEOSldas_runs/LUT_M36_M09_US.csv'

    fname36 = '/Users/u0116961/data_sets/GEOSldas_runs/NLv4_M36_US_SMAP_TB_OL.ldas_tilecoord.bin'
    fname09 = '/Users/u0116961/data_sets/GEOSldas_runs/US_M09_SMAP_OL.ldas_tilecoord.bin'

    io = LDAS_io(exp='US_M36_SMAP_OL')
    dtype, hdr, length = get_template('tilecoord')

    tc36 = io.read_fortran_binary(fname36,
                                  dtype,
                                  hdr=hdr,
                                  length=length,
                                  reg_ftags=True)
    tc09 = io.read_fortran_binary(fname09,
                                  dtype,
                                  hdr=hdr,
                                  length=length,
                                  reg_ftags=True)

    tc36['ind09'] = -9999

    for idx, data in tc36.iterrows():
        print('%i / %i' % (idx, len(tc36)))
        tc36.loc[idx, 'ind09'] = np.argmin((tc09.com_lat - data.com_lat)**2 +
                                           (tc09.com_lon - data.com_lon)**2)

    tc36['ind09'].to_csv(fout)
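
# The loop above is a brute-force per-tile nearest-neighbour search. Below is a minimal
# vectorized sketch of the same idea using a KD-tree; it assumes two tile-coordinate
# DataFrames with 'com_lat' / 'com_lon' columns (as read above) and is an illustration,
# not part of the original script.
import numpy as np
import pandas as pd
from scipy.spatial import cKDTree

def nn_lut(tc_coarse, tc_fine):
    ''' Positional index of the nearest tc_fine tile for each tc_coarse tile (lat/lon Euclidean). '''
    tree = cKDTree(tc_fine[['com_lat', 'com_lon']].values)
    _, idx = tree.query(tc_coarse[['com_lat', 'com_lon']].values, k=1)
    return pd.Series(idx, index=tc_coarse.index, name='ind09')

# e.g. (hypothetical usage): tc36['ind09'] = nn_lut(tc36, tc09).values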
Example #2
def extract_timeseries():

    col = 60
    row = 60

    outfile = '/Users/u0116961/data_sets/LDASsa_runs/test.csv'

    ofa = LDAS_io('ObsFcstAna', exp='US_M36_SMOS_DA_cal_scaled_yearly')
    cat = LDAS_io('xhourly', exp='US_M36_SMOS_DA_cal_scaled_yearly')

    descr = get_spc_descr(cat)

    res = cat.timeseries[['sm_surface',
                          'soil_temp_layer1']].isel(lat=row,
                                                    lon=col).to_dataframe()
    res.drop(['lat', 'lon'], axis='columns', inplace=True)
    res.columns = ['soil_moisture', 'soil_temperature']
    res.index += pd.to_timedelta('2 hours')

    for spc in ofa.timeseries['species'].values:
        res[descr[spc]] = ofa.timeseries['obs_obs'][
            spc - 1, row, col, :].to_dataframe()['obs_obs'].dropna()

    # res.drop(['soil_moisture','soil_temperature'], axis='columns').interpolate(method='linear').plot()
    # plt.tight_layout()
    # plt.show()

    res.to_csv(outfile, float_format='%.6f')
Example #3
def increment_density_plot():

    cal = LDAS_io('incr','US_M36_SMOS_DA_calibrated_scaled')
    uncal = LDAS_io('incr','US_M36_SMOS_DA_nocal_scaled_pentadal')

    incr_var_cal = (cal.timeseries['srfexc'] + cal.timeseries['rzexc'] - cal.timeseries['catdef']).values.flatten()
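    # Note (added): srfexc + rzexc - catdef above is the net analysis increment in total
    # column water (the catchment deficit enters with opposite sign).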
    incr_var_cal = incr_var_cal[~np.isnan(incr_var_cal)]

    incr_var_uncal = (uncal.timeseries['srfexc'] + uncal.timeseries['rzexc'] - uncal.timeseries['catdef']).values.flatten()
    incr_var_uncal = incr_var_uncal[~np.isnan(incr_var_uncal)]

    ind = (incr_var_cal != 0) & (incr_var_uncal != 0)
    incr_var_cal = incr_var_cal[ind]
    incr_var_uncal = incr_var_uncal[ind]

    figsize = (10, 8)
    fontsize = 16
    plt.figure(figsize=figsize)
    plt.hist2d(incr_var_cal, incr_var_uncal, bins=[100, 100], range=[[-60, 60], [-60, 60]], cmap='jet', norm=LogNorm(), density=True)
    plt.xlabel('Increment variance (calibrated RTM)',fontsize=fontsize)
    plt.ylabel('Increment variance (uncalibrated RTM)',fontsize=fontsize)
    plt.xticks(fontsize=fontsize)
    plt.yticks(fontsize=fontsize)

    cb = plt.colorbar()
    cb.ax.tick_params(labelsize=fontsize-2)

    plt.tight_layout()
    plt.show()
Example #4
def plot_r_obs_fcst_err_mod_params():

    stratify = None
    # stratify = 'vegcls'
    # stratify = 'RTMvegcls'

    dir_out = Path('/Users/u0116961/Documents/work/MadKF/CLSM/error_pattern_assesment')

    lai = Dataset('/Users/u0116961/data_sets/MERRA2/MERRA2_images.nc4')['LAI'][:,:,:].mean(axis=0) # [lat,lon]
    lats = Dataset('/Users/u0116961/data_sets/MERRA2/MERRA2_images.nc4')['lat'][:]
    lons = Dataset('/Users/u0116961/data_sets/MERRA2/MERRA2_images.nc4')['lon'][:]

    root = Path(f'~/Documents/work/MadKF/CLSM/SMAP/iter_31').expanduser()

    fname = root / 'result_files' / 'mse_corrected.csv'
    mse = pd.read_csv(fname, index_col=0)

    params1 = LDAS_io(exp='US_M36_SMAP_TB_MadKF_DA_it31').read_params('catparam')
    params2 = LDAS_io(exp='US_M36_SMAP_TB_MadKF_DA_it31').read_params('RTMparam')
    params2.columns = [f'RTM{c}' for c in params2.columns]
    params = pd.concat((params1,params2), axis='columns')

    tc = LDAS_io().grid.tilecoord

    mse['lat'] = tc['com_lat']
    mse['lon'] = tc['com_lon']
    mse['LAI'] = np.nan

    for spc in range(1,5):
        params[f'mse_obs_spc{spc}'] = mse[f'mse_obs_spc{spc}']
        params[f'mse_fcst_spc{spc}'] = mse[f'mse_fcst_spc{spc}']

    for idx in mse.index:
        ind_lat = np.argmin(np.abs(lats-mse.loc[idx]['lat']))
        ind_lon = np.argmin(np.abs(lons-mse.loc[idx]['lon']))
        mse.loc[idx, 'LAI'] = lai[ind_lat, ind_lon]
        params.loc[idx, 'LAI'] = lai[ind_lat, ind_lon]

    cols = [f'mse_obs_spc{spc}' for spc in range(1,5)] + [f'mse_fcst_spc{spc}' for spc in range(1,5)]

    sns.set_context('talk', font_scale=0.8)

    if stratify is not None:
        clss = np.unique(params[stratify])
        clss = clss[clss != -9999]
        for cls in clss:
            tmp_params = params[params[stratify] == cls].drop(stratify, axis='columns')
            corr = tmp_params.corr()[cols].drop(cols).dropna().sort_values('mse_fcst_spc1')
            corr['idx'] = corr.index
            fout = dir_out / f'corr_{stratify}_{cls}.png'
            plot_fig(corr, fout, title=f'{stratify}_{cls} ({len(tmp_params)})')
    else:
        corr = params.corr()[cols].drop(cols).dropna().sort_values('mse_fcst_spc1')
        corr['idx'] = corr.index
        fout = dir_out / 'corr_all.png'
        plot_fig(corr, fout, title=f'all ({len(params)})')

    plt.tight_layout()
    plt.show()
Example #5
def plot_ObsFcstAna_image(species=8):

    io = LDAS_io('ObsFcstAna')
    img = io.read_image(2011, 7, 10, 0, 0)

    img = img[img['obs_species'] == species]

    tag = 'innov'
    img[tag] = img['obs_obs'] - img['obs_fcst']
    img.index = img['obs_tilenum'].values
    plot_ease_img(img, tag)
Example #6
def plot_model_image():

    io = LDAS_io('xhourly')
    img = io.read_image(2011, 4, 20, 10, 30)

    # alternative variables / colorbar ranges (only the last assignment takes effect)
    # tag = 'precipitation_total_surface_flux'
    tag = 'snow_mass'
    # cbrange = (0, 0.0001)
    # cbrange = (0, 0.6)
    cbrange = (0, 100)

    plot_ease_img(img, tag, cbrange=cbrange)
Example #7
def append_ease_gpis():

    gpi_list = pd.read_csv(
        r"D:\data_sets\ASCAT\warp5_grid\pointlist_warp_conus.csv", index_col=0)

    gpi_list['ease_col'] = 0
    gpi_list['ease_row'] = 0

    LDAS = LDAS_io(exp='US_M36_SMOS40_noDA_cal_scaled')

    i = 0
    for idx, info in gpi_list.iterrows():
        i += 1
        print('%i / %i' % (i, len(gpi_list)))

        col, row = LDAS.grid.lonlat2colrow(gpi_list.loc[idx, 'lon'],
                                           gpi_list.loc[idx, 'lat'],
                                           domain=True)

        gpi_list.loc[idx, 'ease_col'] = col
        gpi_list.loc[idx, 'ease_row'] = row

    gpi_list.to_csv(
        r"D:\data_sets\ASCAT\warp5_grid\pointlist_warp_conus_w_ease_colrow.csv"
    )
Example #8
    def __init__(self):
        self.path = Path('/Users/u0116961/data_sets/SMAP/timeseries')

        grid = LDAS_io().grid
        lons, lats = np.meshgrid(grid.ease_lons, grid.ease_lats)
        self.lons = lons.flatten()
        self.lats = lats.flatten()
Example #9
def plot_ease_img(data,
                  tag,
                  llcrnrlat=24,
                  urcrnrlat=51,
                  llcrnrlon=-128,
                  urcrnrlon=-64,
                  figsize=(20, 10),
                  cbrange=(-20, 20),
                  cmap='jet',
                  title='',
                  fontsize=20):

    grid = EASE2()

    tc = LDAS_io().grid.tilecoord

    lons, lats = np.meshgrid(grid.ease_lons, grid.ease_lats)

    img = np.full(lons.shape, np.nan, dtype='float32')

    ind_lat = tc.loc[data.index.values, 'j_indg']
    ind_lon = tc.loc[data.index.values, 'i_indg']

    img[ind_lat, ind_lon] = data[tag]
    img_masked = np.ma.masked_invalid(img)

    f = plt.figure(num=None,
                   figsize=figsize,
                   dpi=90,
                   facecolor='w',
                   edgecolor='k')

    m = Basemap(projection='mill',
                llcrnrlat=llcrnrlat,
                urcrnrlat=urcrnrlat,
                llcrnrlon=llcrnrlon,
                urcrnrlon=urcrnrlon,
                resolution='l')

    m.drawcoastlines()
    m.drawcountries()
    m.drawstates()

    im = m.pcolormesh(lons, lats, img_masked, cmap=cmap, latlon=True)

    im.set_clim(vmin=cbrange[0], vmax=cbrange[1])

    cb = m.colorbar(im, "bottom", size="7%", pad="8%")

    for t in cb.ax.get_xticklabels():
        t.set_fontsize(fontsize)
    for t in cb.ax.get_yticklabels():
        t.set_fontsize(fontsize)

    plt.title(title, fontsize=fontsize)

    plt.tight_layout()
    plt.show()
Example #10
def generate_grid_file():

    files = find_files(r'D:\data_sets\SMOS_L3\cellfiles', '.nc')

    dgg = pd.read_csv(r"D:\data_sets\ASCAT\warp5_grid\pointlist_warp_conus.csv", index_col=0)
    ease_grid = LDAS_io(exp='US_M36_SMOS_DA_cal_scaled_yearly').grid

    grid = pd.DataFrame()

    for cnt, f in enumerate(files):
        print('%i / %i' % (cnt, len(files)))

        tmp = Dataset(f)
        lats = tmp.variables['lat'][:]
        lons = tmp.variables['lon'][:]
        tmp.close()

        offset = grid.index.values[-1] + 1 if len(grid) > 0 else 0
        idx = np.arange(offset, len(lats)*len(lons) + offset)
        tmp_grid = pd.DataFrame(columns=['lat', 'lon', 'row', 'col', 'ease_row', 'ease_col', 'dgg_cell', 'dgg_gpi'], index=idx)

        for row, lat in enumerate(lats):
            for col, lon in enumerate(lons):
                tmp_grid.loc[offset, 'lat'] = lat
                tmp_grid.loc[offset, 'lon'] = lon

                tmp_grid.loc[offset, 'row'] = row
                tmp_grid.loc[offset, 'col'] = col

                ease_col, ease_row = ease_grid.lonlat2colrow(lon, lat, domain=True)
                tmp_grid.loc[offset, 'ease_row'] = ease_row
                tmp_grid.loc[offset, 'ease_col'] = ease_col

                tmp_grid.loc[offset, 'dgg_cell'] = int(os.path.basename(f)[0:4])
                r = np.sqrt((dgg.lon - lon)**2 + (dgg.lat - lat)**2)
                tmp_grid.loc[offset, 'dgg_gpi'] = dgg.iloc[np.where(abs(r - r.min()) < 0.0001)[0][0], 0]

                offset += 1

        grid = pd.concat((grid,tmp_grid))

    grid.to_csv(r'D:\data_sets\SMOS_L3\grid.csv')
Example #11
def Tb_evaluation():

    result_file = r'D:\work\LDAS\2018-06_rmse_uncertainty\Tb_evaluation\validation.csv'

    DA_const_err = LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scaled')
    DA_varia_err = LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scl_errfile')

    ismn = ISMN_io(col_offs=DA_const_err.grid.tilegrids.loc['domain',
                                                            'i_offg'],
                   row_offs=DA_const_err.grid.tilegrids.loc['domain',
                                                            'j_offg'])

    for i, (meta, ts_insitu) in enumerate(ismn.iter_stations()):

        logging.info('%i/%i' % (i, len(ismn.list)))

        res = pd.DataFrame(meta.copy()).transpose()
        col = meta.ease_col
        row = meta.ease_row

        for io, mode in zip([DA_const_err, DA_varia_err],
                            ['const_err', 'varia_err']):
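            # ubRMSD below is the RMSD between mean-removed obs and forecast Tb; 'ensstd' is the
            # time-mean square root of 'obs_anavar' (presumably the analysis ensemble variance in
            # observation space). These interpretations are added here, not part of the original.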
            ubRMSD = np.sqrt(
                (((io.timeseries['obs_obs'][:, row, col, :] -
                   io.timeseries['obs_obs'][:, row, col, :].mean()) -
                  (io.timeseries['obs_fcst'][:, row, col, :] -
                   io.timeseries['obs_fcst'][:, row, col, :].mean()))**2
                 ).mean().values)
            ensstd = np.sqrt(io.timeseries['obs_anavar'][:, row,
                                                         col, :].mean()).values
            res['ubrmsd_' + mode] = ubRMSD
            res['ensstd_' + mode] = ensstd

        if not os.path.isfile(result_file):
            res.to_csv(result_file, float_format='%0.4f')
        else:
            res.to_csv(result_file,
                       float_format='%0.4f',
                       mode='a',
                       header=False)
Example #12
def plot_grid_coord_indices():

    exp = 'SMAP_EASEv2_M36_NORTH_SCA_SMOSrw_DA'
    domain = 'SMAP_EASEv2_M36_NORTH'

    io = LDAS_io('ObsFcstAna', exp=exp, domain=domain)

    lats = io.images.lat.values
    lons = io.images.lon.values

    f = plt.figure(figsize=(10, 5))

    llcrnrlat = 24
    urcrnrlat = 51
    llcrnrlon = -128
    urcrnrlon = -64
    m = Basemap(projection='mill',
                llcrnrlat=llcrnrlat,
                urcrnrlat=urcrnrlat,
                llcrnrlon=llcrnrlon,
                urcrnrlon=urcrnrlon,
                resolution='l')
    m.drawcoastlines(linewidth=0.5)
    m.drawcountries(linewidth=0.5)
    m.drawstates(linewidth=0.1)

    lats = lats[np.arange(0, len(lats), 15)]
    lons = lons[np.arange(0, len(lons), 15)]
    m.drawparallels(lats,
                    labels=[False, False, False, False],
                    linestyle='--',
                    linewidth=1,
                    color='red')
    m.drawmeridians(lons,
                    labels=[False, False, False, False],
                    linestyle='--',
                    linewidth=1,
                    color='red')

    x = np.zeros(len(lons))
    for i, lon in enumerate(lons):
        x[i], tmp = m(lon, lats[0])

    y = np.zeros(len(lats))
    for i, lat in enumerate(lats):
        tmp, y[i] = m(lons[-1], lat)

    plt.xticks(x[0:-1], np.arange(0, len(lons) - 1) * 15)
    plt.yticks(y[1::], np.arange(1, len(lats)) * 15)

    plt.show()
Example #13
def colrow2easegpi(col, row, glob=False):

    # convert local to global indexing
    grid = LDAS_io().grid
    if not glob:
        # avoid in-place modification of the caller's arrays
        col = col + grid.tilegrids.loc['domain', 'i_offg']
        row = row + grid.tilegrids.loc['domain', 'j_offg']

    grid = EASE2()
    lons, lats = np.meshgrid(grid.ease_lons, grid.ease_lats)
    cols, rows = np.meshgrid(np.arange(len(grid.ease_lons)),
                             np.arange(len(grid.ease_lats)))
    lut = pd.Series(np.arange(cols.size),
                    index=([cols.flatten(), rows.flatten()]))
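    # Note (added): with the row-major meshgrid above, lut[(col, row)] equals
    # row * len(grid.ease_lons) + col, i.e. the flattened index into the global EASE-v2 grid.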

    return lut.reindex(list(zip(col, row))).values
Example #14
def plot_innov(spc=8, row=35, col=65):

    exp = 'SMAP_EASEv2_M36_NORTH_SCA_SMOSrw_DA'
    domain = 'SMAP_EASEv2_M36_NORTH'

    ts_scl = LDAS_io('ObsFcstAna', exp=exp, domain=domain).timeseries
    ts_usc = LDAS_io('ObsFcstAna', exp=exp, domain=domain).timeseries

    plt.figure(figsize=(18, 11))

    ax1 = plt.subplot(311)
    df = pd.DataFrame(index=ts_scl.time)
    df['obs'] = ts_scl['obs_obs'][spc, row, col].values
    df['fcst'] = ts_scl['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax1)

    ax2 = plt.subplot(312)
    df = pd.DataFrame(index=ts_usc.time)
    df['obs'] = ts_usc['obs_obs'][spc, row, col].values
    df['fcst'] = ts_usc['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax2)

    ax3 = plt.subplot(313)
    df = pd.DataFrame(index=ts_usc.time)
    df['obs_diff'] = ts_scl['obs_obs'][
        spc, row, col].values - ts_usc['obs_obs'][spc, row, col].values
    df['fcst_diff'] = ts_scl['obs_fcst'][
        spc, row, col].values - ts_usc['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax3)

    print(len(ts_scl['obs_obs'][spc, row, col].dropna('time')))
    print(len(ts_scl['obs_fcst'][spc, row, col].dropna('time')))
    print(len(ts_usc['obs_obs'][spc, row, col].dropna('time')))
    print(len(ts_usc['obs_fcst'][spc, row, col].dropna('time')))

    plt.tight_layout()
    plt.show()

    ts_scl.close()
    ts_usc.close()
Example #15
def plot_innov(spc=8, row=35, col=65):

    ts_scl = LDAS_io('ObsFcstAna', exp='US_M36_SMOS_noDA_scaled').timeseries
    ts_usc = LDAS_io('ObsFcstAna', exp='US_M36_SMOS_noDA_unscaled').timeseries

    plt.figure(figsize=(18, 11))

    ax1 = plt.subplot(311)
    df = pd.DataFrame(index=ts_scl.time)
    df['obs'] = ts_scl['obs_obs'][spc, row, col].values
    df['fcst'] = ts_scl['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax1)

    ax2 = plt.subplot(312)
    df = pd.DataFrame(index=ts_usc.time)
    df['obs'] = ts_usc['obs_obs'][spc, row, col].values
    df['fcst'] = ts_usc['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax2)

    ax3 = plt.subplot(313)
    df = pd.DataFrame(index=ts_usc.time)
    df['obs_diff'] = ts_scl['obs_obs'][
        spc, row, col].values - ts_usc['obs_obs'][spc, row, col].values
    df['fcst_diff'] = ts_scl['obs_fcst'][
        spc, row, col].values - ts_usc['obs_fcst'][spc, row, col].values
    df.dropna().plot(ax=ax3)

    print(len(ts_scl['obs_obs'][spc, row, col].dropna('time')))
    print(len(ts_scl['obs_fcst'][spc, row, col].dropna('time')))
    print(len(ts_usc['obs_obs'][spc, row, col].dropna('time')))
    print(len(ts_usc['obs_fcst'][spc, row, col].dropna('time')))

    plt.tight_layout()
    plt.show()

    ts_scl.close()
    ts_usc.close()
Example #16
def plot_ts(lon, lat):

    experiments = ['US_M36_SMAP_TB_MadKF_DA_it34', 'US_M36_SMOS40_TB_MadKF_DA_it614', 'US_M36_SMOS40_TB_MadKF_DA_it615', 'US_M36_SMOS40_TB_MadKF_DA_it613']

    f = plt.figure(figsize=(18,10))

    for i, exp in enumerate(experiments):

        if 'SMAP' in exp:
            ol = 'US_M36_SMAP_TB_OL_noScl'
        else:
            ol = 'US_M36_SMOS40_TB_OL_noScl'

        ds_ol = LDAS_io('ObsFcstAna', ol)
        ds_da = LDAS_io('ObsFcstAna', exp)

        ts_fcst = ds_ol.read_ts('obs_fcst', lon, lat)
        ts_obs = ds_da.read_ts('obs_obs', lon, lat)
        ts_ana = ds_da.read_ts('obs_ana', lon, lat)

        spc = 1
        # if spc == 1:
        #     spc_tit = 'H pol. / Asc.'
        # elif spc == 2:
        #     spc_tit = 'H pol. / Dsc.'
        # elif spc == 3:
        #     spc_tit = 'V pol. / Asc.'
        # else:
        #     spc_tit = 'V pol. / Dsc.'

        df = pd.concat((ts_fcst[spc], ts_obs[spc], ts_ana[spc]), axis='columns').dropna()
        df.columns = ['Fcst', 'Obs', 'Ana']
        df['time'] = df.index

        ax = plt.subplot(4, 1, i+1)
        g = sns.lineplot(x='time', y='Tb', hue='Variable', data=df.melt('time', df.columns[0:-1], 'Variable', 'Tb'))
        plt.legend(loc='upper right')
        if spc != 4:
            g.set(xticklabels=[])
        ax.set_xlabel('')
        ax.set_xlim([date(2010,1,1), date(2020,1,1)])
        ax.set_ylim([170,280])
        # ax.set_ylabel('')
        plt.title(exp)

    plt.tight_layout()
    plt.show()
Example #17
def plot_fcst_uncertainties():

    io = LDAS_io('ObsFcstAna', exp='US_M36_SMOS40_noDA_cal_scaled')

    lons = io.images['lon'].values
    lats = io.images['lat'].values

    figsize = (18, 9)
    f = plt.figure(num=None,
                   figsize=figsize,
                   dpi=90,
                   facecolor='w',
                   edgecolor='k')

    ax = f.add_subplot(2, 2, 1)
    obserr = io.images.sel(species=1)['obs_fcstvar'].mean('time').values
    plot_xarr_img(obserr, lons, lats, cbrange=[0, 50])
    ax.set_title('Asc / H-pol', fontsize=16)

    ax = f.add_subplot(2, 2, 2)
    obserr = io.images.sel(species=2)['obs_fcstvar'].mean('time').values
    plot_xarr_img(obserr, lons, lats, cbrange=[0, 50])
    ax.set_title('Dsc / H-pol', fontsize=16)

    ax = f.add_subplot(2, 2, 3)
    obserr = io.images.sel(species=3)['obs_fcstvar'].mean('time').values
    plot_xarr_img(obserr, lons, lats, cbrange=[0, 50])
    ax.set_title('Asc / V-pol', fontsize=16)

    ax = f.add_subplot(2, 2, 4)
    obserr = io.images.sel(species=4)['obs_fcstvar'].mean('time').values
    plot_xarr_img(obserr, lons, lats, cbrange=[0, 50])
    ax.set_title('Dsc / V-pol', fontsize=16)

    plt.tight_layout()
    plt.show()
Example #18
def plot_ts():

    lat, lon = 42.23745409478888, -117.08806967006959

    exp0 = 'US_M36_SMAP_TB_OL_scaled_4K_obserr'
    exp1 = 'US_M36_SMAP_TB_DA_SM_ERR_scl_clim_anom_lt_11'
    exp2 = 'US_M36_SMAP_TB_DA_SM_ERR_scl_seas_anom_st_1'

    xhr0 = LDAS_io('xhourly', exp=exp0)

    ofa1 = LDAS_io('ObsFcstAna', exp=exp1)
    xhr1 = LDAS_io('xhourly', exp=exp1)

    ofa2 = LDAS_io('ObsFcstAna', exp=exp2)
    xhr2 = LDAS_io('xhourly', exp=exp2)

    idx_lon, idx_lat = ofa1.grid.lonlat2colrow(lon, lat, domain=True)

    ts_sm0 = xhr0.timeseries.isel(lat=idx_lat, lon=idx_lon).to_dataframe()[[
        'sm_rootzone',
    ]].dropna()
    ts_sm0.columns = ['open_loop']

    ts_ofa1 = ofa1.timeseries.isel(
        lat=idx_lat, lon=idx_lon,
        species=1).to_dataframe()[['obs_ana', 'obs_fcst', 'obs_obs']].dropna()
    ts_sm1 = xhr1.timeseries.isel(lat=idx_lat, lon=idx_lon).to_dataframe()[[
        'sm_rootzone',
    ]].dropna()
    ts_sm1.columns = ['climatology-scaled']

    ts_ofa2 = ofa2.timeseries.isel(
        lat=idx_lat, lon=idx_lon,
        species=1).to_dataframe()[['obs_ana', 'obs_fcst', 'obs_obs']].dropna()
    ts_sm2 = xhr2.timeseries.isel(lat=idx_lat, lon=idx_lon).to_dataframe()[[
        'sm_rootzone',
    ]].dropna()
    ts_sm2.columns = ['seasonality-scaled']

    plt.figure(figsize=(21, 10))

    # ax = plt.subplot(4, 1, 1)
    # sns.lineplot(data=ts_ofa1, dashes=False, ax=ax)
    # plt.title(f'{lat:.2f} N, {lon:.2f} W')
    # plt.xlabel('')
    # ax.get_xaxis().set_ticks([])
    # # plt.ylabel('Tb')
    # plt.ylim(125,290)
    # plt.xlim(date(2015,3,1), date(2020,5,1))
    #
    # ax = plt.subplot(4, 1, 2)
    # sns.lineplot(data=ts_ofa2, dashes=False, ax=ax)
    # plt.title(f'{lat:.2f} N, {lon:.2f} W')
    # plt.xlabel('')
    # ax.get_xaxis().set_ticks([])
    # # plt.ylabel('Tb')
    # plt.ylim(125,290)
    # plt.xlim(date(2015,3,1), date(2020,5,1))

    ax = plt.subplot(2, 1, 1)
    ts_ofa1['innov (clim-scaled)'] = ts_ofa1['obs_obs'] - ts_ofa1['obs_ana']
    ts_ofa1['innov (seas-scaled)'] = ts_ofa2['obs_obs'] - ts_ofa2['obs_ana']
    sns.lineplot(data=ts_ofa1[['innov (clim-scaled)', 'innov (seas-scaled)']],
                 dashes=False,
                 ax=ax,
                 linewidth=1.5)
    plt.axhline(color='black', linewidth=1, linestyle='--')
    plt.xlabel('')
    ax.get_xaxis().set_ticks([])
    # plt.ylabel('O-F')
    plt.ylim(-25, 25)
    plt.xlim(date(2015, 3, 1), date(2020, 5, 1))

    ax = plt.subplot(2, 1, 2)
    sns.lineplot(data=ts_sm0, dashes=False, ax=ax, linewidth=1)
    sns.lineplot(data=ts_sm1,
                 dashes=False,
                 ax=ax,
                 linewidth=1,
                 palette=['darkorange'])
    sns.lineplot(data=ts_sm2,
                 dashes=False,
                 ax=ax,
                 linewidth=1,
                 palette=['green'])
    plt.xlabel('')
    # plt.ylabel('SM')
    plt.ylim(0.0, 0.55)
    plt.xlim(date(2015, 3, 1), date(2020, 5, 1))

    plt.tight_layout()
    plt.show()
Example #19
def plot_cat_timeseries():

    outpath = r'D:\work\LDAS\2018-02_scaling\_new\ismn_eval\timeseries'

    fname = r"D:\work\LDAS\2018-02_scaling\_new\ismn_eval\validation.csv"
    res = pd.read_csv(fname)

    diff_srf = res['corr_DA_cal_pent_ma_sm_surface'] - res[
        'corr_DA_uncal_pent_ma_sm_surface']
    diff_rz = res['corr_DA_cal_pent_ma_sm_rootzone'] - res[
        'corr_DA_uncal_pent_ma_sm_rootzone']
    diff_prof = res['corr_DA_cal_pent_ma_sm_profile'] - res[
        'corr_DA_uncal_pent_ma_sm_profile']
    ind = (diff_srf > 0.2) | (diff_rz > 0.2) | (diff_prof > 0.2)
    res = res.loc[ind, ['network', 'station', 'lat', 'lon']]

    ismn = ISMN_io()
    cal = LDAS_io('xhourly', 'US_M36_SMOS_DA_calibrated_scaled')
    uncal = LDAS_io('xhourly', 'US_M36_SMOS_DA_nocal_scaled_pentadal')

    variables = ['sm_surface', 'sm_rootzone', 'sm_profile']

    for idx, stat in res.iterrows():

        fname = os.path.join(outpath,
                             stat.network + '_' + stat.station + '.png')

        ts_ismn = ismn.read(stat.network, stat.station)
        lat = stat.lat
        lon = stat.lon

        plt.figure(figsize=(17, 9))

        for i, var in enumerate(variables):

            ax = plt.subplot(3, 1, i + 1)

            ts_cal = calc_anomaly(cal.read_ts(var, lon, lat), method='ma')
            ts_cal.index += pd.to_timedelta('2 hours')
            ts_uncal = calc_anomaly(uncal.read_ts(var, lon, lat), method='ma')
            ts_uncal.index += pd.to_timedelta('2 hours')

            df = pd.DataFrame({
                'cal': ts_cal,
                'uncal': ts_uncal,
                'insitu': calc_anomaly(ts_ismn[var], method='ma')
            }).dropna()
            if len(df) > 0:
                df.plot(ax=ax)
            else:
                continue

            title = 'R(ismn - cal) = %.2f , R(ismn - uncal) = %.2f' % (
                df.corr().loc['insitu', 'cal'], df.corr().loc['insitu',
                                                              'uncal'])

            ax.set_title(title, fontsize=12)
            ax.set_xlim('2010-01-01', '2016-01-01')
            ax.set_ylim(-0.3, 0.3)
            ax.set_xlabel('')

        plt.tight_layout()

        plt.savefig(fname, dpi=150)
        plt.close()
Example #20
def plot_P_R_estimates():

    exp = 'US_M36_SMOS_DA_nocal_scaled_pentadal'

    io = LDAS_io('ObsFcstAna', exp)

    outpath = r"D:\work\LDAS\2018-02_scaling\uncertainty_estimates"

    lons = io.timeseries.lon.values
    lats = io.timeseries.lat.values

    lons, lats = np.meshgrid(lons, lats)
    llcrnrlat = 24
    urcrnrlat = 51
    llcrnrlon = -128
    urcrnrlon = -64

    cmap = 'jet'

    fontsize = 16

    cbrange = (0, 60)

    for spc in np.arange(len(io.timeseries.species)):
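        # R_est and P_est below follow the Desroziers-type diagnostics: the time mean of
        # (O-F)*(O-A) approximates the observation-error variance R, and (O-F)*(A-F) the
        # forecast-error variance in observation space (HPH^T); interpretation added here.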

        R_est = ((io.timeseries['obs_obs'][spc, :, :, :] -
                  io.timeseries['obs_fcst'][spc, :, :, :]) *
                 (io.timeseries['obs_obs'][spc, :, :, :] -
                  io.timeseries['obs_ana'][spc, :, :, :])).mean(dim='time')

        P_est = ((io.timeseries['obs_obs'][spc, :, :, :] -
                  io.timeseries['obs_fcst'][spc, :, :, :]) *
                 (io.timeseries['obs_ana'][spc, :, :, :] -
                  io.timeseries['obs_fcst'][spc, :, :, :])).mean(dim='time')

        f = plt.figure(figsize=(16, 5))

        ax = plt.subplot(121)
        m = Basemap(projection='mill',
                    llcrnrlat=llcrnrlat,
                    urcrnrlat=urcrnrlat,
                    llcrnrlon=llcrnrlon,
                    urcrnrlon=urcrnrlon,
                    resolution='c')
        m.drawcoastlines()
        m.drawcountries()
        m.drawstates()
        plt_img = np.ma.masked_invalid(R_est)
        im = m.pcolormesh(lons, lats, plt_img, cmap=cmap, latlon=True)
        im.set_clim(vmin=cbrange[0], vmax=cbrange[1])
        cb = m.colorbar(im, "bottom", size="7%", pad="8%")
        for t in cb.ax.get_xticklabels():
            t.set_fontsize(fontsize)
        for t in cb.ax.get_yticklabels():
            t.set_fontsize(fontsize)
        ax.set_title('R', fontsize=fontsize)

        ax = plt.subplot(122)
        m = Basemap(projection='mill',
                    llcrnrlat=llcrnrlat,
                    urcrnrlat=urcrnrlat,
                    llcrnrlon=llcrnrlon,
                    urcrnrlon=urcrnrlon,
                    resolution='c')
        m.drawcoastlines()
        m.drawcountries()
        m.drawstates()
        plt_img = np.ma.masked_invalid(P_est)
        im = m.pcolormesh(lons, lats, plt_img, cmap=cmap, latlon=True)
        im.set_clim(vmin=cbrange[0], vmax=cbrange[1])
        cb = m.colorbar(im, "bottom", size="7%", pad="8%")
        for t in cb.ax.get_xticklabels():
            t.set_fontsize(fontsize)
        for t in cb.ax.get_yticklabels():
            t.set_fontsize(fontsize)
        ax.set_title('HPH$^T$', fontsize=fontsize)

        plt.tight_layout()

        fname = os.path.join(outpath, 'R_P_est_%i.png' % (spc + 1))

        plt.savefig(fname, dpi=f.dpi)
        plt.close()
Example #21
def scatterplot_RTMparam_incr_innov_diff():

    outpath = r'D:\work\LDAS\2018-02_scaling\_new\diagnostics'

    diag = xr.open_dataset(
        r"D:\work\LDAS\2018-02_scaling\_new\diagnostics\filter_diagnostics.nc")

    params_cal = LDAS_io(
        exp='US_M36_SMOS_DA_calibrated_scaled').read_params('RTMparam')
    params_uncal = LDAS_io(
        exp='US_M36_SMOS_DA_nocal_scaled_harmonic').read_params('RTMparam')

    tc = LDAS_io().grid.tilecoord
    tg = LDAS_io().grid.tilegrids

    tc.i_indg -= tg.loc['domain', 'i_offg']  # col / lon
    tc.j_indg -= tg.loc['domain', 'j_offg']  # row / lat

    ind_lon, ind_lat = tc.i_indg.values, tc.j_indg.values

    ind_cal = 1
    ind_uncal = 3

    fontsize = 20

    incin = 'incr_'
    mv = 'mean'

    modes = ['catdef_', 'srfexc_', 'rzexc_']
    params = ['bh', 'bv', 'omega', 'rgh_hmin', 'rgh_hmax']

    plt.figure(figsize=(18, 9))

    i = 0
    for r, mode in enumerate(modes):
        for c, param in enumerate(params):
            i += 1
            ax = plt.subplot(3, 5, i)

            # xdiff = (params_cal[param] - params_uncal[param]).values
            # ydiff = diag[incin+mode+mv][:,:,ind_cal].values[ind_lat,ind_lon] - diag[incin+mode+mv][:,:,ind_uncal].values[ind_lat,ind_lon]

            xdiff = params_uncal[param].values
            ydiff = diag[incin + mode + mv][:, :, ind_uncal].values[ind_lat,
                                                                    ind_lon]

            ind_valid = (~np.isnan(xdiff)) & (~np.isnan(ydiff))

            xdiff = xdiff[ind_valid]
            ydiff = ydiff[ind_valid]

            s = np.argsort(xdiff)
            xdiff = xdiff[s]
            ydiff = ydiff[s]

            ax.plot(xdiff,
                    ydiff,
                    'o',
                    markersize=3,
                    markerfacecolor='k',
                    markeredgecolor='k')

            fit = np.polyfit(xdiff, ydiff, deg=2)

            ax.plot(xdiff,
                    fit[0] * xdiff**2 + fit[1] * xdiff + fit[2],
                    color='red')

            if param == 'bh':
                ax.set_ylabel(mode[0:-1])
            if mode == 'rzexc_':
                ax.set_xlabel(param)

            corr = pearsonr(xdiff, ydiff)[0]
            rho = spearmanr(xdiff, ydiff)[0]

            ax.set_title('R = %.2f, $\\rho$ = %.2f' % (corr, rho))

    plt.tight_layout()
    plt.show()
Example #22
def plot_ensemble_uncertainty_vs_ubrmsd():

    DA_const_err = LDAS_io('ensstd', 'US_M36_SMOS40_DA_cal_scaled')
    DA_varia_err = LDAS_io('ensstd', 'US_M36_SMOS40_DA_cal_scl_errfile')

    t_ana = pd.DatetimeIndex(
        LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scaled').timeseries.time.
        values).sort_values()

    res = pd.read_csv(
        r'D:\work\LDAS\2018-06_rmse_uncertainty\insitu_evaluation\validation.csv',
        index_col=0)
    res2 = pd.read_csv(
        r'D:\work\LDAS\2018-06_rmse_uncertainty\TCA_evaluation\validation.csv',
        index_col=0)

    res['RMSE_model_DA_const_err_absolute_sm_surface'] = res2[
        'RMSE_model_DA_const_err_absolute_sm_surface']
    res['RMSE_model_DA_varia_err_absolute_sm_surface'] = res2[
        'RMSE_model_DA_varia_err_absolute_sm_surface']

    res['ensstd_const_err'] = np.nan
    res['ensstd_varia_err'] = np.nan

    param = 'sm_surface'

    for idx, vals in res.iterrows():
        print(idx)
        res.loc[idx, 'ensstd_const_err'] = DA_const_err.timeseries[param][
            vals['ease_row'], vals['ease_col'], :].to_pandas().loc[
                t_ana - pd.to_timedelta('2 hours')].mean()
        res.loc[idx, 'ensstd_varia_err'] = DA_varia_err.timeseries[param][
            vals['ease_row'], vals['ease_col'], :].to_pandas().loc[
                t_ana - pd.to_timedelta('2 hours')].mean()

    xlim = [0, 0.12]
    ylim = [0, 0.12]

    plt.figure(figsize=(13, 6))

    # ---------------------------------------------------------------------------------
    ax = plt.subplot(121)

    xx = res['ensstd_const_err']
    yy = res['ubrmsd_DA_const_err_absolute_sm_surface']
    zz = res['RMSE_model_DA_const_err_absolute_sm_surface']

    a = res[[
        'ubrmsd_DA_const_err_absolute_sm_surface',
        'RMSE_model_DA_const_err_absolute_sm_surface'
    ]]
    b = res[[
        'ensstd_const_err',
    ]]
    print(a.apply(lambda col: col.corr(b.iloc[:, 0], method='spearman'), axis=0))

    ax.plot(xx,
            yy,
            'o',
            markersize=3,
            markerfacecolor='k',
            markeredgecolor='k')
    # (xx - yy).hist(bins=20, range=(-0.2, 0.02))
    # (xx - zz).hist(bins=20, range=(-0.06, 0.06))

    ax.plot(xlim, ylim, '--k')
    ax.set_title('Constant observation error')
    ax.set_xlim(xlim)
    ax.set_ylim(xlim)
    # ax.set_xlabel('ensemble standard deviation minus ubRMSD / TCA RMSE')
    ax.set_xlabel('ensemble standard deviation')
    ax.set_ylabel('ubRMSD')

    # print(np.percentile((xx-yy).dropna(), [5,25,50,75,95]))
    # print(np.percentile(yy.dropna(), [5,25,50,75,95]))

    # ---------------------------------------------------------------------------------

    ax = plt.subplot(122)
    xx = res['ensstd_varia_err']
    yy = res['ubrmsd_DA_varia_err_absolute_sm_surface']
    zz = res['RMSE_model_DA_varia_err_absolute_sm_surface']

    a = res[[
        'ubrmsd_DA_varia_err_absolute_sm_surface',
        'RMSE_model_DA_varia_err_absolute_sm_surface'
    ]]
    b = res[[
        'ensstd_varia_err',
    ]]
    print(a.apply(lambda col: col.corr(b.iloc[:, 0], method='spearman'), axis=0))

    ax.plot(xx,
            yy,
            'o',
            markersize=3,
            markerfacecolor='k',
            markeredgecolor='k')
    # (xx - yy).hist(bins=20, range=(-0.2, 0.02))
    # (xx - zz).hist(bins=20, range=(-0.06, 0.06))

    ax.plot(xlim, ylim, '--k')
    ax.set_title('Variable observation error')
    ax.set_xlim(xlim)
    ax.set_ylim(xlim)
    # ax.set_xlabel('ensemble standard deviation minus ubRMSD / TCA RMSE')
    ax.set_xlabel('ensemble standard deviation')
    ax.set_ylabel('ubRMSD')

    # print(np.percentile((xx-yy).dropna(), [5,25,50,75,95]))
    # print(np.percentile(yy.dropna(), [5,25,50,75,95]))

    plt.show()
Example #23
def plot_catparams(exp, domain, root, outpath):

    io = LDAS_io('catparam', exp=exp, domain=domain, root=root)

    tc = io.grid.tilecoord
    tg = io.grid.tilegrids

    lons = io.grid.ease_lons[tc['i_indg'].min():(tc['i_indg'].max() + 1)]
    lats = io.grid.ease_lats[tc['j_indg'].min():(tc['j_indg'].max() + 1)]

    tc.i_indg -= tg.loc['domain', 'i_offg']  # col / lon
    tc.j_indg -= tg.loc['domain', 'j_offg']  # row / lat

    lons, lats = np.meshgrid(lons, lats)

    llcrnrlat = np.min(lats)
    urcrnrlat = np.max(lats)
    llcrnrlon = np.min(lons)
    urcrnrlon = np.max(lons)
    figsize = (20, 10)
    # cbrange = (-20, 20)
    cmap = 'jet'
    fontsize = 20

    params = LDAS_io(exp=exp, domain=domain).read_params('catparam')

    for param in params:

        if not os.path.exists(os.path.join(outpath, exp)):
            os.mkdir(os.path.join(outpath, exp))

        fname = os.path.join(outpath, exp, param + '.png')

        img = np.full(lons.shape, np.nan)
        img[tc.j_indg.values, tc.i_indg.values] = params[param].values
        img_masked = np.ma.masked_invalid(img)

        f = plt.figure(num=None,
                       figsize=figsize,
                       dpi=90,
                       facecolor='w',
                       edgecolor='k')

        m = Basemap(projection='mill',
                    llcrnrlat=llcrnrlat,
                    urcrnrlat=urcrnrlat,
                    llcrnrlon=llcrnrlon,
                    urcrnrlon=urcrnrlon,
                    resolution='l')

        m.drawcoastlines()
        m.drawcountries()
        m.drawstates()

        # draw parallels and meridians.
        # label parallels on right and top
        # meridians on bottom and left
        parallels = np.arange(0., 81, 10.)
        # labels = [left,right,top,bottom]
        m.drawparallels(parallels, labels=[False, True, True, False])
        meridians = np.arange(10., 351., 20.)
        m.drawmeridians(meridians, labels=[True, False, False, True])

        im = m.pcolormesh(lons, lats, img_masked, cmap=cmap, latlon=True)

        cb = m.colorbar(im, "bottom", size="7%", pad="8%")

        for t in cb.ax.get_xticklabels():
            t.set_fontsize(fontsize)
        for t in cb.ax.get_yticklabels():
            t.set_fontsize(fontsize)

        plt.title(param)

        plt.savefig(fname, dpi=f.dpi)
        plt.close()
Example #24
def plot_rtm_parameters(exp, domain, root, outpath):

    experiments = [
        'US_M36_SMOS_DA_calibrated_scaled',
        'US_M36_SMOS_DA_nocal_scaled_harmonic'
    ]
    experiments = [exp]

    io = LDAS_io('catparam', exp=exp, domain=domain, root=root)

    tc = io.grid.tilecoord
    tg = io.grid.tilegrids

    lons = io.grid.ease_lons[tc['i_indg'].min():(tc['i_indg'].max() + 1)]
    lats = io.grid.ease_lats[tc['j_indg'].min():(tc['j_indg'].max() + 1)]

    tc.i_indg -= tg.loc['domain', 'i_offg']  # col / lon
    tc.j_indg -= tg.loc['domain', 'j_offg']  # row / lat

    lons, lats = np.meshgrid(lons, lats)

    llcrnrlat = np.min(lats)
    urcrnrlat = np.max(lats)
    llcrnrlon = np.min(lons)
    urcrnrlon = np.max(lons)

    figsize = (20, 10)
    # cbrange = (-20, 20)
    cmap = 'jet'
    fontsize = 20

    for exp in experiments:

        if not os.path.exists(os.path.join(outpath, exp)):
            os.mkdir(os.path.join(outpath, exp))

        params = LDAS_io(exp=exp).read_params('RTMparam')

        for param in params:

            fname = os.path.join(outpath, exp, param + '.png')

            img = np.full(lons.shape, np.nan)
            img[tc.j_indg.values, tc.i_indg.values] = params[param].values
            img_masked = np.ma.masked_invalid(img)

            f = plt.figure(num=None,
                           figsize=figsize,
                           dpi=90,
                           facecolor='w',
                           edgecolor='k')

            m = Basemap(projection='mill',
                        llcrnrlat=llcrnrlat,
                        urcrnrlat=urcrnrlat,
                        llcrnrlon=llcrnrlon,
                        urcrnrlon=urcrnrlon,
                        resolution='l')

            m.drawcoastlines()
            m.drawcountries()
            m.drawstates()

            im = m.pcolormesh(lons, lats, img_masked, cmap=cmap, latlon=True)

            # im.set_clim(vmin=cbrange[0], vmax=cbrange[1])

            cb = m.colorbar(im, "bottom", size="7%", pad="8%")

            for t in cb.ax.get_xticklabels():
                t.set_fontsize(fontsize)
            for t in cb.ax.get_yticklabels():
                t.set_fontsize(fontsize)

            plt.title(param)

            plt.savefig(fname, dpi=f.dpi)
            plt.close()
Example #25
def TCA_insitu_evaluation():

    result_file = r'D:\work\LDAS\2018-06_rmse_uncertainty\TCA_evaluation\validation.csv'

    noDA = LDAS_io('xhourly', 'US_M36_SMOS40_noDA_cal_scaled')

    DA_const_err = LDAS_io('xhourly', 'US_M36_SMOS40_DA_cal_scaled')
    DA_varia_err = LDAS_io('xhourly', 'US_M36_SMOS40_DA_cal_scl_errfile')

    t_ana = pd.DatetimeIndex(
        LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scaled').timeseries.time.
        values).sort_values()

    ascat = HSAF_io()
    gpi_list = pd.read_csv(
        r"D:\data_sets\ASCAT\warp5_grid\pointlist_warp_conus.csv", index_col=0)

    ismn = ISMN_io(col_offs=noDA.grid.tilegrids.loc['domain', 'i_offg'],
                   row_offs=noDA.grid.tilegrids.loc['domain', 'j_offg'])

    runs = ['noDA', 'DA_const_err', 'DA_varia_err']
    tss = [noDA.timeseries, DA_const_err.timeseries, DA_varia_err.timeseries]

    variables = [
        'sm_surface',
    ]
    modes = [
        'absolute',
    ]

    for i, (meta, ts_insitu) in enumerate(ismn.iter_stations()):
        logging.info('%i/%i' % (i, len(ismn.list)))

        try:

            res = pd.DataFrame(meta.copy()).transpose()
            col = meta.ease_col
            row = meta.ease_row

            gpi = lonlat2gpi(meta.lon, meta.lat, gpi_list)

            ts_asc = ascat.read(gpi, resample_time=False)
            if ts_asc is None:
                continue
            ts_asc.name = 'ascat'
            ts_asc = pd.DataFrame(ts_asc)

            for var in variables:
                for mode in modes:

                    ts_ins = ts_insitu[var].dropna()
                    ts_ins.name = 'insitu'
                    ts_ins = pd.DataFrame(ts_ins)

                    for run, ts_model in zip(runs, tss):

                        ind = (ts_model['snow_mass'][row, col].values == 0) & (
                            ts_model['soil_temp_layer1'][row,
                                                         col].values > 277.15)
                        ts_mod = ts_model[var][row, col].to_series().loc[ind]
                        ts_mod.index += pd.to_timedelta('2 hours')
                        ts_mod = ts_mod.loc[t_ana].dropna()
                        ts_mod.name = 'model'
                        ts_mod = pd.DataFrame(ts_mod)

                        matched = df_match(ts_mod, ts_asc, ts_ins, window=0.5)
                        data = ts_mod.join(matched[0][[
                            'ascat',
                        ]]).join(matched[1][[
                            'insitu',
                        ]]).dropna()

                        tc_res = TCA(data['model'].values,
                                     data['ascat'].values,
                                     data['insitu'].values)

                        res['RMSE_model_' + run + '_' + mode + '_' +
                            var] = tc_res[1][0]
                        res['RMSE_ascat_' + run + '_' + mode + '_' +
                            var] = tc_res[1][1]
                        res['RMSE_insitu_' + run + '_' + mode + '_' +
                            var] = tc_res[1][2]

                        res['beta_ascat_' + run + '_' + mode + '_' +
                            var] = tc_res[2][1]
                        res['beta_insitu_' + run + '_' + mode + '_' +
                            var] = tc_res[2][2]

                        res['len_' + mode + '_' + var] = len(data)

            if not os.path.isfile(result_file):
                res.to_csv(result_file, float_format='%0.4f')
            else:
                res.to_csv(result_file,
                           float_format='%0.4f',
                           mode='a',
                           header=False)

        except Exception:
            continue
Example #26
def filter_diagnostics_evaluation():

    result_file = r'D:\work\LDAS\2018-06_rmse_uncertainty\filter_diagnostics.nc'

    cal_DA_clim_innov = LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scaled')
    cal_DA_seas_innov = LDAS_io('ObsFcstAna',
                                'US_M36_SMOS40_DA_cal_scl_errfile')

    cal_DA_clim_incr = LDAS_io('incr', 'US_M36_SMOS40_DA_cal_scaled')
    cal_DA_seas_incr = LDAS_io('incr', 'US_M36_SMOS40_DA_cal_scl_errfile')

    runs = OrderedDict([
        (1, [cal_DA_clim_innov.timeseries, cal_DA_clim_incr.timeseries]),
        (2, [cal_DA_seas_innov.timeseries, cal_DA_seas_incr.timeseries])
    ])

    tags = [
        'innov_mean', 'innov_var', 'norm_innov_mean', 'norm_innov_var',
        'n_valid_innov', 'incr_catdef_mean', 'incr_catdef_var',
        'incr_rzexc_mean', 'incr_rzexc_var', 'incr_srfexc_mean',
        'incr_srfexc_var'
    ]

    lons = np.unique(cal_DA_clim_innov.grid.tilecoord['com_lon'].values)
    lats = np.unique(cal_DA_clim_innov.grid.tilecoord['com_lat'].values)[::-1]

    species = cal_DA_clim_innov.timeseries['species'].values

    ds = ncfile_init(result_file, lats, lons, runs.keys(), species, tags)

    for i_run, run in enumerate(runs):
        for i_spc, spc in enumerate(species):

            logging.info('run %i, species %i' % (i_run, i_spc))

            ds['innov_mean'][:, :, i_run,
                             i_spc] = (runs[run][0]['obs_obs'][i_spc] -
                                       runs[run][0]['obs_fcst'][i_spc]).mean(
                                           dim='time').values
            ds['innov_var'][:, :, i_run,
                            i_spc] = (runs[run][0]['obs_obs'][i_spc] -
                                      runs[run][0]['obs_fcst'][i_spc]).var(
                                          dim='time').values
            ds['norm_innov_mean'][:, :, i_run, i_spc] = (
                (runs[run][0]['obs_obs'][i_spc] -
                 runs[run][0]['obs_fcst'][i_spc]) /
                np.sqrt(runs[run][0]['obs_obsvar'][i_spc] +
                        runs[run][0]['obs_fcstvar'][i_spc])).mean(
                            dim='time').values
            ds['norm_innov_var'][:, :, i_run, i_spc] = (
                (runs[run][0]['obs_obs'][i_spc] -
                 runs[run][0]['obs_fcst'][i_spc]) /
                np.sqrt(runs[run][0]['obs_obsvar'][i_spc] +
                        runs[run][0]['obs_fcstvar'][i_spc])).var(
                            dim='time').values
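            # Consistency check: for well-specified observation and forecast error variances,
            # the normalized innovations should have near-zero mean and near-unit variance.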

            tmp = runs[run][0]['obs_obs'][i_spc].values
            np.place(tmp, ~np.isnan(tmp), 1.)
            np.place(tmp, np.isnan(tmp), 0.)
            ds['n_valid_innov'][:, :, i_run, i_spc] = tmp.sum(axis=2)

        if len(runs[run]) == 2:
            np.place(runs[run][1]['catdef'].values,
                     runs[run][1]['catdef'].values == 0, np.nan)
            np.place(runs[run][1]['rzexc'].values,
                     runs[run][1]['rzexc'].values == 0, np.nan)
            np.place(runs[run][1]['srfexc'].values,
                     runs[run][1]['srfexc'].values == 0, np.nan)
            ds['incr_catdef_mean'][:, :, i_run] = runs[run][1]['catdef'].mean(
                dim='time').values
            ds['incr_catdef_var'][:, :, i_run] = runs[run][1]['catdef'].var(
                dim='time').values
            ds['incr_rzexc_mean'][:, :, i_run] = runs[run][1]['rzexc'].mean(
                dim='time').values
            ds['incr_rzexc_var'][:, :, i_run] = runs[run][1]['rzexc'].var(
                dim='time').values
            ds['incr_srfexc_mean'][:, :, i_run] = runs[run][1]['srfexc'].mean(
                dim='time').values
            ds['incr_srfexc_var'][:, :, i_run] = runs[run][1]['srfexc'].var(
                dim='time').values

    ds.close()
Example #27
            # Confidence intervals with corrected sample size
            z_l, z_u = norm.interval(alpha, loc=z, scale=(n_corr - 3)**(-0.5))
            CI_l = (np.exp(2 * z_l) - 1) / (np.exp(2 * z_l) + 1)
            CI_u = (np.exp(2 * z_u) - 1) / (np.exp(2 * z_u) + 1)
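            # (exp(2z) - 1) / (exp(2z) + 1) is the inverse Fisher z-transform, i.e. np.tanh(z),
            # which maps the z-space confidence bounds back to correlation space.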

            res.loc[ds1, ds2, 'CI_l_corr'] = CI_l
            res.loc[ds1, ds2, 'CI_u_corr'] = CI_u

    if flatten is True:
        if len(cols) == 2:
            res = pd.Series(res.loc[cols[0], cols[1], :],
                            index=stats,
                            dtype='float32')

    return res


if __name__ == '__main__':

    from pyldas.interface import LDAS_io
    io = LDAS_io('ObsFcstAna', 'US_M36_SMOS40_DA_cal_scaled')
    ser1 = io.timeseries['obs_fcst'][0, 40, 40].to_series()
    ser2 = io.timeseries['obs_obs'][0, 40, 40].to_series()
    df = pd.concat((ser1, ser2), axis=1)

    print(bias(df))
    print(ubRMSD(df))
    print(Pearson_R(df))
    # for val in res.loc['obs_ana','obs_fcst',:]:
    #     print val.values
Example #28
def EC_ascat_smap_ismn_ldas():

    result_file = Path('/Users/u0116961/Documents/work/extended_collocation/ec_ascat_smap_ismn_ldas.csv')

    names = ['insitu', 'ascat', 'smap', 'ol', 'da']
    combs = list(combinations(names, 2))

    ds_ol = LDAS_io('xhourly', 'US_M36_SMAP_TB_OL_noScl').timeseries
    ds_da = LDAS_io('xhourly', 'US_M36_SMAP_TB_MadKF_DA_it11').timeseries
    ds_da_ana = LDAS_io('ObsFcstAna', 'US_M36_SMAP_TB_MadKF_DA_it11').timeseries['obs_ana']
    tg = LDAS_io().grid.tilegrids

    modes = ['absolute','longterm','shortterm']

    ismn = ISMN_io()
    ismn.list = ismn.list.iloc[70::]
    ascat = HSAF_io()
    smap = SMAP_io()

    lut = pd.read_csv(Paths().lut, index_col=0)

    i = 0
    for meta, ts_insitu in ismn.iter_stations(surface_only=True):
        i += 1
        logging.info('%i/%i' % (i, len(ismn.list)))

        try:
            if len(ts_insitu := ts_insitu['2015-04-01':'2020-04-01'].resample('1d').mean().dropna()) < 25:
                continue
        except Exception:
            continue

        res = pd.DataFrame(meta.copy()).transpose()
        col = meta.ease_col
        row = meta.ease_row

        colg = col + tg.loc['domain', 'i_offg']  # col / lon
        rowg = row + tg.loc['domain', 'j_offg']  # row / lat

        tmp_lut = lut[(lut.ease2_col == colg) & (lut.ease2_row == rowg)]
        if len(tmp_lut) == 0:
            continue

        gpi_smap = tmp_lut.index.values[0]
        gpi_ascat = tmp_lut.ascat_gpi.values[0]

        try:
            ts_ascat = ascat.read(gpi_ascat, resample_time=False).resample('1d').mean().dropna()
            ts_ascat = ts_ascat[~ts_ascat.index.duplicated(keep='first')]
            ts_ascat.name = 'ASCAT'
        except Exception:
            continue

        ts_smap = smap.read(gpi_smap)

        if (ts_ascat is None) | (ts_smap is None):
            continue

        ind = (ds_ol['snow_mass'][:, row, col].values == 0)&(ds_ol['soil_temp_layer1'][:, row, col].values > 277.15)
        ts_ol = ds_ol['sm_surface'][:, row, col].to_series().loc[ind].dropna()
        ts_ol.index += pd.to_timedelta('2 hours')

        ind = (ds_da['snow_mass'][:, row, col].values == 0)&(ds_da['soil_temp_layer1'][:, row, col].values > 277.15)
        ts_da = ds_da['sm_surface'][:, row, col].to_series().loc[ind].dropna()
        ts_da.index += pd.to_timedelta('2 hours')

        for mode in modes:

            # always derive anomalies from the original (absolute) series so that the
            # 'shortterm' mode does not operate on already-computed 'longterm' anomalies
            if mode == 'absolute':
                ts_ins = ts_insitu.copy()
                ts_asc = ts_ascat.copy()
                ts_smp = ts_smap.copy()
                ts_ol_m = ts_ol.copy()
                ts_da_m = ts_da.copy()
            else:
                ts_ins = calc_anom(ts_insitu.copy(), longterm=(mode == 'longterm')).dropna()
                ts_asc = calc_anom(ts_ascat.copy(), longterm=(mode == 'longterm')).dropna()
                ts_smp = calc_anom(ts_smap.copy(), longterm=(mode == 'longterm')).dropna()
                ts_ol_m = calc_anom(ts_ol.copy(), longterm=(mode == 'longterm')).dropna()
                ts_da_m = calc_anom(ts_da.copy(), longterm=(mode == 'longterm')).dropna()

            tmp = pd.DataFrame(dict(zip(names, [ts_ins, ts_asc, ts_smp, ts_ol_m, ts_da_m]))).dropna()

            corr = tmp.corr()
            ec_res = ecol(tmp[['insitu', 'ascat', 'smap', 'ol', 'da']], correlated=[['smap', 'ol'], ['smap', 'da'], ['ol', 'da']])

            res[f'len_{mode}'] = len(tmp)
            for c in combs:
                res[f'corr_{"_".join(c)}_{mode}'] = corr.loc[c]
            res[f'err_corr_smap_ol_{mode}'] = ec_res['err_corr_smap_ol']
            res[f'err_corr_smap_da_{mode}'] = ec_res['err_corr_smap_da']
            res[f'err_corr_ol_da_{mode}'] = ec_res['err_corr_ol_da']

        if not result_file.exists():
            res.to_csv(result_file, float_format='%0.4f')
        else:
            res.to_csv(result_file, float_format='%0.4f', mode='a', header=False)
Example #29
import sys
sys.path.append(r'/data/leuven/317/vsc31786/miniconda/bin/')

from pyldas.interface import LDAS_io

# alternative experiment / domain combinations (only the last assignment takes effect)
# exp = 'SMAP_EASEv2_M36_NORTH_SCA_SMOSrw_DA'
# domain = 'SMAP_EASEv2_M36_NORTH'

# exp = 'BE_M36_EASEv2_SMAPin_L4SM_v001'
# domain = 'SMAP_EASEv2_M36_GLOBAL'

# exp = 'SMAP_EASEv2_M09_SI_SMOSfw_DA'
# domain = 'SMAP_EASEv2_M09'

exp = 'SMAPL4v3_M09_PM'
domain = 'SMAP_EASEv2_M09'

io = LDAS_io('daily', exp, domain)
io.bin2netcdf()

#io = LDAS_io('xhourly', exp, domain)
#io.bin2netcdf()

#io = LDAS_io('incr', exp, domain)
#io.bin2netcdf()

#io = LDAS_io('ensstd', exp, domain)
#io.bin2netcdf()
Example #30
def plot_rtm_parameter_differences():

    outpath = r'C:\Users\u0116961\Documents\work\LDASsa\2018-02_scaling\RTM_parameters\differences'

    tc = LDAS_io().grid.tilecoord
    tg = LDAS_io().grid.tilegrids

    exp = 'SMAP_EASEv2_M36_NORTH_SCA_SMOSrw_DA'
    domain = 'SMAP_EASEv2_M36_NORTH'

    tc.i_indg -= tg.loc['domain', 'i_offg']  # col / lon
    tc.j_indg -= tg.loc['domain', 'j_offg']  # row / lat

    lons = LDAS_io().grid.ease_lons[np.min(LDAS_io().grid.tilecoord.i_indg):(
        np.max(LDAS_io().grid.tilecoord.i_indg) + 1)]
    lats = LDAS_io().grid.ease_lats[np.min(LDAS_io().grid.tilecoord.j_indg):(
        np.max(LDAS_io().grid.tilecoord.j_indg) + 1)]

    lons, lats = np.meshgrid(lons, lats)

    llcrnrlat = 24
    urcrnrlat = 51
    llcrnrlon = -128
    urcrnrlon = -64
    figsize = (20, 10)
    #
    cmap = 'RdYlBu'
    fontsize = 20

    params_cal = LDAS_io(
        exp='US_M36_SMOS_DA_calibrated_scaled').read_params('RTMparam')
    params_uncal = LDAS_io(
        exp='US_M36_SMOS_DA_nocal_scaled_harmonic').read_params('RTMparam')

    for param in params_cal:

        if (param == 'bh') | (param == 'bv'):
            cbrange = (-0.3, 0.3)
        elif (param == 'omega'):
            cbrange = (-0.1, 0.1)
        else:
            cbrange = (-1, 1)

        fname = os.path.join(outpath, param + '.png')

        img = np.full(lons.shape, np.nan)
        img[tc.j_indg.values, tc.i_indg.
            values] = params_cal[param].values - params_uncal[param].values
        img_masked = np.ma.masked_invalid(img)

        f = plt.figure(num=None,
                       figsize=figsize,
                       dpi=90,
                       facecolor='w',
                       edgecolor='k')

        m = Basemap(projection='mill',
                    llcrnrlat=llcrnrlat,
                    urcrnrlat=urcrnrlat,
                    llcrnrlon=llcrnrlon,
                    urcrnrlon=urcrnrlon,
                    resolution='l')

        m.drawcoastlines()
        m.drawcountries()
        m.drawstates()

        im = m.pcolormesh(lons, lats, img_masked, cmap=cmap, latlon=True)

        im.set_clim(vmin=cbrange[0], vmax=cbrange[1])

        cb = m.colorbar(im, "bottom", size="7%", pad="8%")

        for t in cb.ax.get_xticklabels():
            t.set_fontsize(fontsize)
        for t in cb.ax.get_yticklabels():
            t.set_fontsize(fontsize)

        plt.title(param)

        plt.savefig(fname, dpi=f.dpi)
        plt.close()