Example #1
def test_pages_proxy_manager_seeded():
    drange = [1970, 2000]
    update_dict = {
        'proxies': {
            'pages': {
                'datadir_proxy': '/home/disk/p/wperkins/Research/LMR/tests',
                'datafile_proxy': 'test_pdata.pckl',
                'metafile_proxy': 'test_meta.pckl'
            },
            'proxy_frac': 0.75
        },
        'core': {
            'seed': 1
        }
    }
    cfg_obj = cfg.Config(**update_dict)

    pmanager = proxy2.ProxyManager(cfg_obj, drange)
    pmanager2 = proxy2.ProxyManager(cfg_obj, drange)

    assert len(pmanager.ind_assim) == len(pmanager2.ind_assim)

    for pid in pmanager.ind_assim:
        assert pid in pmanager2.ind_assim

    for pid in pmanager.ind_eval:
        assert pid in pmanager2.ind_eval
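
The assimilation/evaluation split is reproducible here because core.seed fixes the random sampling inside ProxyManager. As an illustration of the principle only (split_ids is a hypothetical helper, not the LMR internals), a seeded numpy Generator yields the same split on every run:

# Illustration of seeded, reproducible splitting; split_ids is hypothetical.
import numpy as np

def split_ids(ids, frac, seed):
    rng = np.random.default_rng(seed)      # fixed seed -> same permutation
    shuffled = rng.permutation(ids)
    n_assim = int(round(len(ids) * frac))
    return list(shuffled[:n_assim]), list(shuffled[n_assim:])

ids = ['Aus_01', 'Aus_02', 'Aus_03', 'Aus_04']
assert split_ids(ids, 0.75, seed=1) == split_ids(ids, 0.75, seed=1)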
Example #2
def test_pages_proxy_manager_proxy_fracs():
    drange = [1970, 2000]
    test_dir = '/home/disk/p/wperkins/Research/LMR/tests'
    update_dict = {
        'core': {
            'recon_period': drange
        },
        'proxies': {
            'pages': {
                'datadir_proxy': test_dir,
                'datafile_proxy': 'test_pdata.pckl',
                'metafile_proxy': 'test_meta.pckl'
            },
            'proxy_frac': 0.5
        }
    }
    config = cfg.Config(**update_dict)

    pmanager = proxy2.ProxyManager(config, drange)
    assert len(pmanager.ind_assim) == 2
    assert len(pmanager.ind_eval) == 2

    update_dict['proxies']['proxy_frac'] = 0.0
    config = cfg.Config(**update_dict)
    pmanager = proxy2.ProxyManager(config, drange)
    assert len(pmanager.ind_assim) == 0
    assert len(pmanager.ind_eval) == 4
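
The pickled test data contains four proxy sites, so proxy_frac directly sets the split sizes: 0.5 of 4 gives 2 assimilated and 2 withheld, 0.0 withholds all four, and (as in the next example) 1.0 assimilates everything. A minimal sketch of that arithmetic, using a hypothetical helper rather than LMR code:

# Hypothetical helper mirroring the proxy_frac split arithmetic above.
def split_sizes(n_proxies, proxy_frac):
    n_assim = int(round(n_proxies * proxy_frac))
    return n_assim, n_proxies - n_assim

assert split_sizes(4, 0.5) == (2, 2)
assert split_sizes(4, 0.0) == (0, 4)
assert split_sizes(4, 1.0) == (4, 0)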
Example #3
def test_pages_proxy_manager_all():
    drange = [1970, 2000]
    test_dir = '/home/disk/p/wperkins/Research/LMR/tests'
    update_dict = {
        'core': {
            'recon_period': drange
        },
        'proxies': {
            'pages': {
                'datadir_proxy': test_dir,
                'datafile_proxy': 'test_pdata.pckl',
                'metafile_proxy': 'test_meta.pckl'
            },
            'proxy_frac': 1.0
        }
    }
    config = cfg.Config(**update_dict)

    pmanager = proxy2.ProxyManager(config, drange)
    assert pmanager.ind_eval is None
    assert len(pmanager.ind_assim) == 4

    bytype, allp = proxy2.ProxyPages.load_all(config, drange)

    for mobj, pobj in zip(pmanager.sites_assim_proxy_objs(), allp):
        assert mobj.id == pobj.id
Example #4
def test_pages_time_values_len_mismatch(seasons, dummy_proxy, config):
    with pytest.raises(AssertionError):
        p = dummy_proxy
        time = p.time[0:-2]
        pclass = proxy2.ProxyPages(config, p.pid, p.ptype, p.start_yr,
                                   p.end_yr, p.lat, p.lon, p.elev, seasons,
                                   p.values, time)
Example #5
def test_pages_load_site_no_preloaded(pdata, config):
    drange = [1980, 2000]
    start, end = drange
    pclass = proxy2.ProxyPages.load_site(config, 'Aus_16', drange)

    assert pclass.id == 'Aus_16'
    assert np.all((pclass.time >= start) & (pclass.time <= end))
    assert pclass.type == r'Coral_d18O'
    assert pclass.lat == -21
    assert pclass.lon == proxy2.fix_lon(-160)
    assert pclass.resolution == 1.0
    assert pclass.elev == 0
    np.testing.assert_array_equal(
        pclass.values.values,
        pdata['Aus_16'][(pdata.index >= start) & (pdata.index <= end)
                        & pdata['Aus_16'].notnull()].values)
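
The pdata fixture indexed in the final comparison is not shown above; judging from the .index and .notnull() usage, it is a pandas DataFrame of proxy values keyed by year. A plausible, purely hypothetical fixture would simply read the same test pickle:

import os

import pandas as pd
import pytest

@pytest.fixture
def pdata():
    # Hypothetical fixture: load the proxy-value DataFrame used in the test.
    test_dir = '/home/disk/p/wperkins/Research/LMR/tests'
    return pd.read_pickle(os.path.join(test_dir, 'test_pdata.pckl'))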
Example #6
def test_pages_init(dummy_proxy, config):
    p = dummy_proxy
    pclass = proxy2.ProxyPages(config, p.pid, p.ptype, p.start_yr, p.end_yr,
                               p.lat, p.lon, p.elev, p.seasonality, p.values,
                               p.time)

    assert pclass.id == p.pid
    assert pclass.type == p.ptype
    assert pclass.start_yr == p.start_yr
    assert pclass.end_yr == p.end_yr
    assert pclass.values == p.values
    assert pclass.lat == p.lat
    assert pclass.lon == p.lon
    assert pclass.elev == p.elev
    assert pclass.time == p.time
    assert pclass.resolution == p.resolution
    assert pclass.seasonality == p.seasonality
    assert hasattr(pclass, 'psm')
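
The dummy_proxy, config, and seasons fixtures live in the test suite's conftest and are not shown here. A hypothetical stand-in, inferred only from the attributes these tests read, might look like this:

from types import SimpleNamespace

import pytest

@pytest.fixture
def dummy_proxy():
    # Hypothetical fixture exposing only the attributes the tests access.
    time = list(range(1950, 1960))
    return SimpleNamespace(pid='Aus_01', ptype='Coral_d18O',
                           start_yr=1950, end_yr=1959,
                           lat=-21.0, lon=200.0, elev=0,
                           seasonality=list(range(1, 13)),
                           resolution=1.0,
                           values=[1.0] * len(time), time=time)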
Example #7
def run(cfg_class=None, fcast_against=None, figure_dir=None):

    if cfg_class is None:
        if not LMR_config.LEGACY_CONFIG:
            if len(sys.argv) > 1:
                yaml_file = sys.argv[1]
            else:
                yaml_file = os.path.join(LMR_config.SRC_DIR, 'config.yml')

            LMR_config.initialize_config_yaml(LMR_config, yaml_file)

        LMR_config.proxies.proxy_frac = 1.0
        cfg_class = LMR_config

    cfg = cfg_class.Config()

    # Create figure directory
    if figure_dir is None:
        figure_dir = os.path.join('.', cfg.core.nexp + '_lim_figs')
    else:
        figure_dir = os.path.join(figure_dir, cfg.core.nexp)

    os.makedirs(figure_dir, exist_ok=True)

    recon_period = cfg.core.recon_period
    save_analysis_ye = cfg.prior.outputs['analysis_Ye']

    if not base_only:
        # Get the necessary averaging intervals for the gridded data
        prox_manager = LMR_proxy.ProxyManager(cfg.proxies,
                                              cfg.psm,
                                              recon_period,
                                              include_eval=save_analysis_ye)
        req_avg_intervals = prox_manager.avg_interval_by_psm_type
    else:
        req_avg_intervals = {}

    # Load the state
    state = LMR_gridded.State.from_config(cfg.prior, req_avg_intervals)

    base_keys, psm_req_keys = \
        LMR_gridded.PriorVariable.get_base_and_psm_req_vars(cfg.prior,
                                                            req_avg_intervals)

    lim_fcaster = LMR_forecaster.LIMForecaster.from_config(
        cfg.forecaster, base_keys, psm_req_var_keys=psm_req_keys)

    regrid_grid = cfg.prior.regrid_cfg.esmpy_regrid_to

    # load scalar factors for forecasting experiments
    var_key, grid_coords = next(iter(state.var_coords.items()))
    lat = grid_coords['lat']
    lon = grid_coords['lon']
    space_shp = state.var_space_shp[var_key]

    if plot_eofs:
        print('Plotting variable EOFs.')
        fig_fname = os.path.join(figure_dir,
                                 '{}_basis_eofs.png'.format(regrid_grid))
        dobj_eofs = {
            var_key: eofs[:, :plot_neofs]
            for var_key, eofs in lim_fcaster.var_eofs.items()
        }
        ptools.plot_exp_eofs(dobj_eofs,
                             state,
                             lim_fcaster.valid_data_mask,
                             var_eof_stats=lim_fcaster.var_eof_stats,
                             filename=fig_fname)

    if plot_state_eofs:
        print('Plotting multi-variable EOFs.')
        fig_fname = os.path.join(figure_dir,
                                 '{}_multivar_eofs.png'.format(regrid_grid))

        multivar_eofs = {}
        for var_key, var_eofs in lim_fcaster.var_eofs.items():
            multi_eof_var_span = lim_fcaster.var_span[var_key]
            vstart, vend = multi_eof_var_span
            state_eofs = lim_fcaster.calib_eofs[vstart:vend, :plot_neofs]

            multivar_eofs[var_key] = var_eofs @ state_eofs

        title = 'Multivar EOF_{:d}  Field: {}'

        ptools.plot_exp_eofs(
            multivar_eofs,
            state,
            lim_fcaster.valid_data_mask,
            multi_var_eof_stats=lim_fcaster.multi_var_eof_stats,
            filename=fig_fname,
            title=title)

    lim = lim_fcaster.lim

    if plot_lim_noise_eofs:
        fig_fname = os.path.join(figure_dir,
                                 '{}_noise_eofs.png'.format(regrid_grid))
        Q_evect = lim.Q_evects[:, :plot_num_noise_modes].real
        noise_eofs = {}
        for var_key, var_eofs in lim_fcaster.var_eofs.items():
            multi_eof_var_span = lim_fcaster.var_span[var_key]
            vstart, vend = multi_eof_var_span
            state_eofs = lim_fcaster.calib_eofs[vstart:vend, :]

            noise_eofs[var_key] = var_eofs @ state_eofs @ Q_evect

        title = 'Noise EOF_{:d}  Field: {}'

        ptools.plot_exp_eofs(noise_eofs,
                             state,
                             lim_fcaster.valid_data_mask,
                             filename=fig_fname,
                             title=title)

    if plot_lim_modes:
        print('Plotting LIM modes!')
        fig_fname = os.path.join(figure_dir,
                                 '{}_lim_fcast_modes.png'.format(regrid_grid))
        ptools.plot_multi_lim_modes(lim,
                                    lat,
                                    lon,
                                    space_shp,
                                    lim_fcaster,
                                    row_limit=plot_num_lim_modes,
                                    save_file=fig_fname)

    if do_perfect_fcast or do_ens_fcast:

        if fcast_against is not None:
            update_dict = {'prior_source': fcast_against, 'detrend': True}
        else:
            fcast_against = cfg_class.prior.prior_source
            update_dict = {'detrend': True}

        regrid_cfg = cfg_class.regrid()
        full_prior_cfg = cfg_class.prior(regrid_cfg, nens='all', **update_dict)

        state = LMR_gridded.State.from_config(
            full_prior_cfg, req_avg_intervals=req_avg_intervals)
        full_state_loaded = True

        reduced_state, compressed = \
            lim_fcaster.phys_space_data_to_fcast_space(state, is_diff_model)

        start = fcast_start_yr
        end = start + reduced_state.shape[0]
        times = list(range(start, end))[1:]

        if include_psm_req_output:
            incl_psm_keys = psm_req_keys
        else:
            incl_psm_keys = None

        if do_perfect_fcast:
            perfect_fcast_verification(state,
                                       cfg,
                                       lim_fcaster,
                                       reduced_state,
                                       times,
                                       base_keys,
                                       fcast_against,
                                       compressed,
                                       fig_out_dir=figure_dir,
                                       psm_req_keys=incl_psm_keys)

        if do_ens_fcast:
            ens_fcast_verification(reduced_state,
                                   nens,
                                   lim,
                                   state,
                                   cfg,
                                   lim_fcaster,
                                   base_keys,
                                   fcast_against,
                                   fig_out_dir=figure_dir,
                                   psm_req_keys=incl_psm_keys)
    else:
        reduced_state, _ = lim_fcaster.phys_space_data_to_fcast_space(state)
        full_state_loaded = False
    if do_long_integration:

        t0 = reduced_state[0:1, :]

        # long integration with buffer of 50 years to forget initial state
        # 2880 timesteps ~ 3 hr timestep
        last = lutils.ens_long_integration(integration_iters,
                                           integration_len_yr + 50,
                                           lim,
                                           t0,
                                           timesteps=2880,
                                           use_multiprocess=False)

        last = last[50:]

        fname = 'long_integration_output_{}.npy'.format(regrid_grid)
        path = os.path.join(figure_dir, fname)
        np.save(path, last)

        # load scalar factors for forecasting experiments
        grid_coords = next(iter(state.var_coords.values()))
        latgrid = grid_coords['lat']
        longrid = grid_coords['lon']

        [scalar_factors, field_factors,
         base_scalar_factors] = get_scalar_factors(latgrid, longrid, cfg,
                                                   lim_fcaster, base_keys)

        if not full_state_loaded:
            regrid_cfg = cfg_class.regrid()
            full_prior_cfg = cfg_class.prior(regrid_cfg, nens='all')

            state = LMR_gridded.State.from_config(
                full_prior_cfg, req_avg_intervals=req_avg_intervals)

        # Calculate scalar output values defined in config
        scalar_outputs = {}
        ref_scalar_outputs = {}

        # Get SOI
        [scalar_factors, scalar_outputs
         ] = vutils.handle_soi_factors(scalar_factors, base_scalar_factors,
                                       scalar_outputs, state, last)
        soi_key, (soi_ref, soi_fcast) = scalar_outputs.popitem()
        soi_key = '_'.join(soi_key)
        scalar_outputs[soi_key] = soi_fcast
        ref_scalar_outputs[soi_key] = soi_ref

        for measure_key, factor in scalar_factors.items():

            ref_factor = base_scalar_factors[measure_key]
            var_key = measure_key[:-1]
            valid_data = lim_fcaster.valid_data_mask.get(var_key, None)
            ref_data = mutils.get_field_from_state(state,
                                                   var_key,
                                                   valid_data=valid_data)

            scalar_out = last @ factor
            measure_key = '_'.join(measure_key)
            scalar_outputs[measure_key] = scalar_out
            ref_scalar = ref_data @ ref_factor
            ref_scalar_outputs[measure_key] = ref_scalar

        scalar_fpath = os.path.join(figure_dir,
                                    'long_integration_scalar_out.npz')
        np.savez(scalar_fpath,
                 time=np.arange(integration_len_yr),
                 **scalar_outputs)

        ref_scalar_fpath = os.path.join(figure_dir,
                                        'ref_long_integration_scalar_out.npz')
        np.savez(ref_scalar_fpath, **ref_scalar_outputs)
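
A hedged note on usage: run() reads module-level flags (base_only, plot_eofs, do_perfect_fcast, do_long_integration, and friends) that are assumed to be defined at the top of the script, so a minimal invocation only needs to supply the optional arguments:

# Hypothetical invocation; cfg_class defaults to LMR_config when omitted.
if __name__ == '__main__':
    run(figure_dir='./lim_diagnostic_figs')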
Example #8
def test_proxies_get_class():
    assert proxy2.get_proxy_class('pages') is proxy2.ProxyPages
Example #9
def test_abstract_class_creation():
    with pytest.raises(TypeError):
        x = proxy2.BaseProxyObject()
Example #10
def test_pages_none_values(seasons, dummy_proxy, config):
    with pytest.raises(ValueError):
        p = dummy_proxy
        pclass = proxy2.ProxyPages(config, p.pid, p.ptype, p.start_yr,
                                   p.end_yr, p.lat, p.lon, p.elev, seasons,
                                   None, p.time)
Example #11
def main():
    # TODO Fix configuration so next line is unnecessary
    LMR_config.proxies.LMRdb.dbversion = db_version

    regrid_config = LMR_config.regrid(**regrid_kwargs)
    psm_config = LMR_config.psm(regrid_config, lmr_path=lmr_path,
                                proxy_use_from=use_from,
                                **psm_cfg_kwargs)
    proxy_config = LMR_config.proxies(lmr_path=lmr_path,
                                      **proxy_kwargs)

    begin_time = time()

    proxy_database = proxy_config.use_from

    print('Proxies             :', proxy_database)
    print('PSM type            :', psm_type)
    print('Calib. period       :', psm_config.calib_period)
    print('Anom. ref. period   :', psm_config.anom_reference_period)

    if not (proxy_database == 'PAGES2kv1' or proxy_database == 'LMRdb'):
        raise KeyError(f'Proxy database, {proxy_database}, is not a '
                       f'valid database key.')

    if psm_type == 'bilinear':
        psm_file = psm_config.bilinear.pre_calib_datafile
    elif psm_type == 'linear':
        psm_file = psm_config.linear.pre_calib_datafile
    else:
        raise KeyError(f'Designated psm_type, {psm_type}, is not a valid key.')

    # corresponding file containing complete diagnostics
    psm_file_diag = psm_file.replace('.pckl', '_diag.pckl')

    # Check if psm_file already exists, archive it with current date/time if
    # it exists and replace by new file
    if os.path.isfile(psm_file):
        nowstr = datetime.datetime.now().strftime("%Y%m%d:%H%M")
        # splitext removes only the '.pckl' extension (str.rstrip would strip
        # trailing characters, not a suffix)
        no_file_ext = os.path.splitext(psm_file)[0]
        command = 'mv {} {}_{}.pckl'.format(psm_file, no_file_ext, nowstr)
        os.system(command)
        if os.path.isfile(psm_file_diag):
            diag_no_file_ext = os.path.splitext(psm_file_diag)[0]
            command = 'mv {} {}_{}.pckl'.format(psm_file_diag,
                                                diag_no_file_ext,
                                                nowstr)
            os.system(command)

    proxy_class = LMR_proxy.get_proxy_class(use_from)

    proxies = proxy_class.load_all_annual_no_filtering(proxy_config, psm_config)

    if test_proxy_seasonality:

        test_proxies = []
        non_test_proxies = []
        for proxy in proxies:
            if proxy.type in test_season_proxy_types:
                test_proxies.append(proxy)
            else:
                non_test_proxies.append(proxy)

        test_proxies = calib_seasonality_test(test_proxies, psm_config,
                                              test_seasons, psm_type)

        non_test_proxies = calib_default_seasonality(non_test_proxies,
                                                     psm_config,
                                                     psm_type,
                                                     default_season)
        proxies = test_proxies + non_test_proxies

    save_calib_no_testing(proxies, psm_file, psm_file_diag, psm_type)

    end_time = time() - begin_time
    print('=========================================================')
    print('PSM calibration completed in '+ str(end_time/60.0)+' mins')
    print('=========================================================')
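
The archiving block above renames existing calibration files by shelling out to mv; an equivalent standard-library sketch with the same intent (offered as an alternative, not the project's code) would be:

import datetime
import os
import shutil

def archive_existing(path):
    # Rename an existing file to <name>_<timestamp>.pckl before overwriting.
    if os.path.isfile(path):
        nowstr = datetime.datetime.now().strftime('%Y%m%d:%H%M')
        root, ext = os.path.splitext(path)
        shutil.move(path, '{}_{}{}'.format(root, nowstr, ext))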
Example #12
def LMR_driver_callable(cfg=None):

    if cfg is None:
        cfg = BaseCfg.Config()  # Use base configuration from LMR_config

    # Temporary fix for old 'state usage'
    core_cfg = cfg.core
    prior_cfg = cfg.prior
    output_avg_interval = prior_cfg.avg_interval

    # verbose controls print comments (0 = none; 1 = most important;
    #  2 = many; 3 = a lot; >=4 = all)
    verbose = cfg.LOG_LEVEL

    nexp = core_cfg.nexp
    workdir = core_cfg.datadir_output
    recon_period = core_cfg.recon_period
    online = core_cfg.online_reconstruction
    assim_solver_key = core_cfg.assimilation_solver
    hybrid_update = core_cfg.hybrid_update
    hybrid_a_val = core_cfg.hybrid_a
    blend_prior = core_cfg.blend_prior
    reg_inf = core_cfg.reg_inflate
    inf_factor = core_cfg.inflation_factor
    nens = core_cfg.nens
    inflation_fact = core_cfg.inflation_factor
    outputs = prior_cfg.outputs
    save_analysis_ye = outputs['analysis_Ye']
    save_prior_ye = outputs['prior_Ye']

    # ==========================================================================
    # <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< MAIN CODE >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    # ==========================================================================
    # TODO: AP Logging instead of print statements
    if verbose > 0:
        print('')
        print('=====================================================')
        print('Running LMR reconstruction...')
        print('=====================================================')
        print('Name of experiment: ', nexp)
        print('')

    begin_time = time()

    # Define the number of years of the reconstruction (nb of assimilation
    # times)
    ntimes = recon_period[1] - recon_period[0] + 1
    recon_times = np.arange(recon_period[0], recon_period[1] + 1)

    # ==========================================================================
    # Get information on proxies to assimilate ---------------------------------
    # ==========================================================================

    begin_time_proxy_load = time()
    if verbose > 0:
        print('')
        print('-----------------------------------')
        print('Uploading proxy data & PSM info ...')
        print('-----------------------------------')

    # Build dictionaries of proxy sites to assimilate and those set aside for
    # verification
    prox_manager = LMR_proxy.ProxyManager(cfg.proxies,
                                          cfg.psm,
                                          recon_period,
                                          include_eval=save_analysis_ye)
    req_avg_intervals = prox_manager.avg_interval_by_psm_type

    type_site_assim = prox_manager.assim_ids_by_group
    # count the total number of proxies
    assim_proxy_count = len(prox_manager.ind_assim)
    # count the total withheld proxies
    if prox_manager.ind_eval:
        eval_proxy_count = len(prox_manager.ind_eval)
    else:
        eval_proxy_count = None

    if verbose > 0:
        print('Assimilating proxy types/sites:', type_site_assim)
        print(
            '--------------------------------------------------------------------'
        )
        print('Proxy counts for experiment:')
        for pkey, plist in sorted(type_site_assim.items()):
            print(('%45s : %5d' % (pkey, len(plist))))
        print(('%45s : %5d' % ('TOTAL', assim_proxy_count)))
        print(
            '--------------------------------------------------------------------'
        )

    if verbose > 2:
        proxy_load_time = time() - begin_time_proxy_load
        print('-----------------------------------------------------')
        print('Loading completed in ' + str(proxy_load_time) + ' seconds')
        print('-----------------------------------------------------')

    # ==========================================================================
    # Load prior data ----------------------------------------------------------
    # ==========================================================================
    if verbose > 0:
        print('-------------------------------------------')
        print('Uploading gridded (model) data as prior ...')
        print('-------------------------------------------')
        print('Source for prior: ', prior_cfg.prior_source)

    # Create initial state vector of desired variables at smallest time res
    Xb_one = LMR_gridded.State.from_config(prior_cfg,
                                           req_avg_intervals=req_avg_intervals)

    [calc_and_store_scalars,
     scalar_containers] = \
        lmr_out.prepare_scalar_calculations(outputs['scalar_ens'], Xb_one,
                                            prior_cfg, ntimes, nens)

    [field_zarr_outputs, field_get_ens_func
     ] = lmr_out.prepare_field_output(outputs, Xb_one, ntimes, nens, workdir,
                                      recon_times)

    load_time = time() - begin_time
    if verbose > 2:
        print('-----------------------------------------------------')
        print('Loading completed in ' + str(load_time) + ' seconds')
        print('-----------------------------------------------------')

    # check covariance inflation from config
    if reg_inf and verbose > 2:
        print(('\nUsing covariance inflation factor: %8.2f' % inflation_fact))

    if save_prior_ye:

        if online:
            use_ntimes = ntimes
        else:
            use_ntimes = 1

        xb_ye_out = _get_assim_eval_ye_outputs(use_ntimes, nens, recon_period,
                                               workdir,
                                               'prior_ye_ens_output.zarr',
                                               assim_proxy_count,
                                               eval_proxy_count)
        assim_xb_ye_out, eval_xb_ye_out = xb_ye_out
    else:
        assim_xb_ye_out = eval_xb_ye_out = None

    if save_analysis_ye:

        xb_ye_out = _get_assim_eval_ye_outputs(ntimes, nens, recon_period,
                                               workdir,
                                               'posterior_ye_ens_output.zarr',
                                               assim_proxy_count,
                                               eval_proxy_count)
        assim_xa_ye_out, eval_xa_ye_out = xb_ye_out
    else:
        assim_xa_ye_out = eval_xa_ye_out = None

    # ----------------------------------
    # Augment state vector with the Ye's
    # ----------------------------------

    # TODO: Figure out how to handle precalculated YE Vals
    # Extract all the Ye's from master list of proxy objects into numpy array
    ye_all = LMR_proxy.calc_assim_ye_vals(prox_manager, Xb_one)
    Xb_one.augment_state(ye_all)

    if save_prior_ye:
        assim_xb_ye_out[:, 0] = ye_all

        if eval_proxy_count is not None:
            eval_ye = LMR_proxy.calc_eval_ye_vals(prox_manager, Xb_one)
            eval_xb_ye_out[:, 0] = eval_ye

    # TODO: replicate single variable prior saving

    # Initialize forecaster for online reconstructions
    if online:
        print('\n Initializing LMR forecasting for online reconstruction')
        key = cfg.forecaster.use_forecaster
        fcastr_class = LMR_forecaster.get_forecaster_class(key)
        forecaster = fcastr_class.from_config(cfg.forecaster, Xb_one.var_keys)

    # Get the solver for the assimilation update
    assim_solver_func = get_solver(assim_solver_key)

    # ==========================================================================
    # Loop over all years and proxies, and perform assimilation ----------------
    # ==========================================================================
    Xb_one.stash_state('orig_aug')

    start_yr, end_yr = recon_period
    assim_times = np.arange(start_yr, end_yr + 1)

    # ---------------------
    # Loop over proxy types
    # ---------------------
    for iyr, t in enumerate(assim_times):

        if verbose > 0:
            print('working on year: ' + str(t))

        # TODO: I feel like this should be moved into LMR_DA?
        if hybrid_update and online and assim_solver_key == 'serial':
            # Get static climatological Xb_one and blend prior if desired
            [Xb_static,
             Xb_one] = process_hybrid_static_prior(iyr, Xb_one, blend_prior,
                                                   hybrid_a_val)
            solver_kwargs = {
                'Xb_static': Xb_static,
                'hybrid_a_val': hybrid_a_val
            }
        else:
            Xb_static = None
            solver_kwargs = {}

        # Save output fields for the prior
        lmr_out.save_field_output(iyr,
                                  'prior',
                                  Xb_one,
                                  field_zarr_outputs,
                                  output_def=outputs['prior'])

        # Save prior Ye values
        if save_prior_ye and online and iyr != 0:
            assim_ye = Xb_one.get_var_data('ye_vals')
            assim_xb_ye_out[:, iyr] = assim_ye

            if eval_proxy_count is not None:
                eval_ye = LMR_proxy.calc_eval_ye_vals(prox_manager, Xb_one)
                eval_xb_ye_out[:, iyr] = eval_ye

        # Gather proxies / Ye values for the year
        [p_vals, p_errs, valid_pidxs] = get_valid_proxies_info(t, prox_manager)

        ye_start_idx, _ = Xb_one.var_view_range['ye_vals']

        # Update Xb with each proxy
        Xa = assim_solver_func(Xb_one.state,
                               p_vals,
                               p_errs,
                               valid_pidxs,
                               ye_start_idx,
                               verbose=False,
                               **solver_kwargs)

        Xb_one.state = Xa

        # Calculate and store index values from field
        calc_and_store_scalars(Xb_one, iyr)
        # Calculate and store posterior field reductions
        lmr_out.save_field_output(iyr,
                                  'posterior',
                                  Xb_one,
                                  field_zarr_outputs,
                                  output_def=outputs['posterior'])

        # Save field ensemble members
        if outputs['field_ens_output'] is not None:
            lmr_out.save_field_output(iyr,
                                      'field_ens_output',
                                      Xb_one,
                                      field_zarr_outputs,
                                      ens_out_funcs=field_get_ens_func)

        # Save posterior Ye Information
        if save_analysis_ye:
            assim_ye = Xb_one.get_var_data('ye_vals')
            assim_xa_ye_out[:, iyr] = assim_ye

            if eval_proxy_count is not None:
                eval_ye = LMR_proxy.calc_eval_ye_vals(prox_manager, Xb_one)
                eval_xa_ye_out[:, iyr] = eval_ye

        if online:

            forecaster.forecast(Xb_one)

            # Inflation Adjustment
            if reg_inf:
                Xb_one.reg_inflate_xb(inf_factor)

            # Recalculate Ye values
            ye_all = LMR_proxy.calc_assim_ye_vals(prox_manager, Xb_one)
            Xb_one.reset_augmented_ye(ye_all)
        else:
            Xb_one.stash_recall_state_list('orig_aug', copy=True)

    end_time = time() - begin_time

    # End of loop on proxy types
    if verbose > 0:
        print('')
        print('=====================================================')
        print('Reconstruction completed in ' + str(end_time / 60.0) + ' mins')
        print('=====================================================')

    if verbose > 0:
        if assim_xa_ye_out is not None:
            print('-----------------------------------')
            print('Assimilated proxy Ye output info...')
            print(assim_xa_ye_out.info)
            print('-----------------------------------')

        if eval_xa_ye_out is not None:
            print('-----------------------------------')
            print('Withheld proxy Ye output info...')
            print(eval_xa_ye_out.info)
            print('-----------------------------------')

    # Save Scalar information and proxies assimilated/withheld
    lmr_out.save_scalar_ensembles(workdir, recon_times, scalar_containers)
    lmr_out.save_recon_proxy_information(prox_manager, workdir)

    exp_end_time = time() - begin_time
    if verbose > 0:
        print('')
        print('=====================================================')
        print('Experiment completed in ' + str(exp_end_time / 60.0) + ' mins')
        print('=====================================================')
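
For completeness, a hedged sketch of driving this function with an explicit configuration, mirroring the Config construction used in the earlier examples (the defaulted call above uses BaseCfg.Config() when cfg is None):

# Hypothetical invocation; LMR_config is assumed to expose the same Config
# wrapper used elsewhere in these examples.
import LMR_config

cfg = LMR_config.Config()
LMR_driver_callable(cfg)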