    def test_bootstrap_control_variate_estimator(self):
        example = TunableModelEnsemble(np.pi / 2 * 0.95)
        model_ensemble = pya.ModelEnsemble(example.models)

        univariate_variables = [uniform(-1, 2), uniform(-1, 2)]
        variable = pya.IndependentMultivariateRandomVariable(
            univariate_variables)

        cov_matrix = example.get_covariance_matrix()
        model_costs = [1, 0.5, 0.4]
        est = ACVMF(cov_matrix, model_costs)

        target_cost = 1000
        nhf_samples, nsample_ratios = est.allocate_samples(target_cost)[:2]
        generate_samples = partial(pya.generate_independent_random_samples,
                                   variable)
        samples, values = est.generate_data(nhf_samples, nsample_ratios,
                                            generate_samples, model_ensemble)

        mc_cov_matrix = compute_covariance_from_control_variate_samples(values)
        # assert np.allclose(cov_matrix, mc_cov_matrix, atol=1e-2)
        est = ACVMF(mc_cov_matrix, model_costs)
        weights = get_mfmc_control_variate_weights(
            example.get_covariance_matrix())
        bootstrap_mean, bootstrap_variance = pya.bootstrap_mfmc_estimator(
            values, weights, 10000)

        est_mean = est(values)
        est_variance = est.get_variance(nhf_samples, nsample_ratios)
        print(abs((est_variance - bootstrap_variance) / est_variance))
        assert abs((est_variance - bootstrap_variance) / est_variance) < 6e-2

    def test_generate_samples_and_values_mfmc(self):
        functions = ShortColumnModelEnsemble()
        model_ensemble = pya.ModelEnsemble(
            [functions.m0, functions.m1, functions.m2])
        univariate_variables = [
            uniform(5, 10),
            uniform(15, 10),
            norm(500, 100),
            norm(2000, 400),
            lognorm(s=0.5, scale=np.exp(5))
        ]
        variable = pya.IndependentMultivariateRandomVariable(
            univariate_variables)
        generate_samples = partial(pya.generate_independent_random_samples,
                                   variable)

        nhf_samples = 10
        nsample_ratios = [2, 4]
        samples, values = pya.generate_samples_and_values_mfmc(
            nhf_samples, nsample_ratios, model_ensemble, generate_samples)

        for jj in range(1, len(samples)):
            # model jj is evaluated on nsample_ratios[jj-1]*nhf_samples
            # samples
            assert samples[jj][1].shape[1] == \
                nsample_ratios[jj - 1] * nhf_samples
            # model jj's shared subset coincides with the previous model's
            # samples (the high-fidelity model has a single sample set)
            idx = 1
            if jj == 1:
                idx = 0
            assert np.allclose(samples[jj][0], samples[jj - 1][idx])
Example 3
def setup_check_variance_reduction_model_ensemble_short_column(
        nmodels=5, npilot_samples=None):
    example = ShortColumnModelEnsemble()
    model_ensemble = pya.ModelEnsemble(
        [example.models[ii] for ii in range(nmodels)])
    univariate_variables = [
        uniform(5, 10), uniform(15, 10), norm(500, 100), norm(2000, 400),
        lognorm(s=0.5, scale=np.exp(5))]
    variable = pya.IndependentMultivariateRandomVariable(univariate_variables)
    generate_samples = partial(
        pya.generate_independent_random_samples, variable)

    if npilot_samples is not None:
        # The number of pilot samples affects the ability of the numerical
        # estimate of the variance reduction to match the theoretical value
        cov, samples, weights = pya.estimate_model_ensemble_covariance(
            npilot_samples, generate_samples, model_ensemble)
    else:
        # It is difficult to create a quadrature rule for the lognormal
        # distribution, so instead define the variable as normal and then
        # apply the log transform
        univariate_variables = [
            uniform(5, 10), uniform(15, 10), norm(500, 100), norm(2000, 400),
            norm(loc=5, scale=0.5)]
        variable = pya.IndependentMultivariateRandomVariable(
            univariate_variables)

        example.apply_lognormal = True
        cov = example.get_covariance_matrix(variable)[:nmodels, :nmodels]
        example.apply_lognormal = False

    return model_ensemble, cov, generate_samples


def setup_check_variance_reduction_model_ensemble_polynomial():
    example = PolynomialModelEnsemble()
    model_ensemble = pya.ModelEnsemble(example.models)
    cov = example.get_covariance_matrix()
    #npilot_samples=int(1e6)
    #cov, samples, weights = pya.estimate_model_ensemble_covariance(
    #    npilot_samples,generate_samples,model_ensemble)
    return model_ensemble, cov, example.generate_samples

    def test_rsquared_mfmc(self):
        functions = ShortColumnModelEnsemble()
        model_ensemble = pya.ModelEnsemble(
            [functions.m0, functions.m3, functions.m4])
        univariate_variables = [
            uniform(5, 10),
            uniform(15, 10),
            norm(500, 100),
            norm(2000, 400),
            lognorm(s=0.5, scale=np.exp(5))
        ]
        variable = pya.IndependentMultivariateRandomVariable(
            univariate_variables)
        generate_samples = partial(pya.generate_independent_random_samples,
                                   variable)
        npilot_samples = int(1e4)
        pilot_samples = generate_samples(npilot_samples)
        config_vars = np.arange(model_ensemble.nmodels)[np.newaxis, :]
        pilot_samples = pya.get_all_sample_combinations(
            pilot_samples, config_vars)
        pilot_values = model_ensemble(pilot_samples)
        pilot_values = np.reshape(pilot_values,
                                  (npilot_samples, model_ensemble.nmodels))
        cov = np.cov(pilot_values, rowvar=False)

        nhf_samples = 10
        nsample_ratios = np.asarray([2, 4])

        nsamples_per_model = np.concatenate([[nhf_samples],
                                             nsample_ratios * nhf_samples])

        eta = pya.get_mfmc_control_variate_weights(cov)
        cor = pya.get_correlation_from_covariance(cov)
        # accumulate the MFMC estimator variance model by model
        var_mfmc = cov[0, 0] / nsamples_per_model[0]
        for k in range(1, model_ensemble.nmodels):
            var_mfmc += (1 / nsamples_per_model[k - 1] -
                         1 / nsamples_per_model[k]) * (
                             eta[k - 1]**2 * cov[k, k] + 2 * eta[k - 1] *
                             cor[0, k] * np.sqrt(cov[0, 0] * cov[k, k]))

        # the variance reduction relative to single-fidelity MC satisfies
        # var_mfmc/(cov[0, 0]/nhf_samples) = 1 - r^2
        assert np.allclose(var_mfmc / cov[0, 0] * nhf_samples,
                           1 - pya.get_rsquared_mfmc(cov, nsample_ratios))

def setup_check_variance_reduction_model_ensemble_tunable():
    example = TunableModelEnsemble(np.pi / 4)
    model_ensemble = pya.ModelEnsemble(example.models)
    cov = example.get_covariance_matrix()
    return model_ensemble, cov, example.generate_samples
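

# All of the setup helpers above return the same
# (model_ensemble, cov, generate_samples) triple, so a single driver can
# exercise any ensemble. A minimal usage sketch (assuming only the helpers
# defined above):
model_ensemble, cov, generate_samples = \
    setup_check_variance_reduction_model_ensemble_tunable()
# the covariance matrix has one row and column per model in the ensemble
assert cov.shape == (model_ensemble.nmodels, model_ensemble.nmodels)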
Example 7
With this choice of weights the variance reduction obtained is given by

.. math:: \gamma = 1-\rho_1^2\left(\frac{r_1-1}{r_1}+\sum_{\alpha=2}^M \frac{r_\alpha-r_{\alpha-1}}{r_\alpha r_{\alpha-1}}\frac{\rho_\alpha^2}{\rho_1^2}\right)

Let us use MFMC to estimate the mean of our high-fidelity model.
"""
import numpy as np
import matplotlib.pyplot as plt
import pyapprox as pya
from functools import partial
from pyapprox.tests.test_control_variate_monte_carlo import \
    TunableModelEnsemble, ShortColumnModelEnsemble, PolynomialModelEnsemble
np.random.seed(1)
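
#%%
# As a standalone check of the variance-reduction formula above, the
# following helper (a sketch, not a pyapprox function) evaluates
# :math:`\gamma` directly from the model correlations and sample ratios.
# Since :math:`\gamma=1-r^2` for MFMC, its output can be compared with
# 1 - pya.get_rsquared_mfmc(cov, nsample_ratios) once cov and
# nsample_ratios are computed below.


def mfmc_variance_reduction(cor, nsample_ratios):
    # gamma = 1 - rho_1^2*((r_1-1)/r_1
    #     + sum_{alpha=2}^M (r_alpha-r_{alpha-1})/(r_alpha*r_{alpha-1})
    #       *rho_alpha^2/rho_1^2)
    rho = cor[0, 1:]  # correlation of each low-fidelity model with f_0
    r = np.asarray(nsample_ratios, dtype=float)
    result = rho[0]**2*(r[0]-1)/r[0]
    for alpha in range(1, r.shape[0]):
        result += (r[alpha]-r[alpha-1])/(r[alpha]*r[alpha-1])*rho[alpha]**2
    return 1-result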

short_column_model = ShortColumnModelEnsemble()
model_ensemble = pya.ModelEnsemble(
    [short_column_model.m0, short_column_model.m1, short_column_model.m2])

costs = np.asarray([100, 50, 5])
target_cost = int(1e4)
idx = [0, 1, 2]
cov = short_column_model.get_covariance_matrix()[np.ix_(idx, idx)]

# define the sample allocation
nhf_samples, nsample_ratios = pya.allocate_samples_mfmc(
    cov, costs, target_cost)[:2]
# generate sample sets
samples, values = pya.generate_samples_and_values_mfmc(
    nhf_samples, nsample_ratios, model_ensemble,
    short_column_model.generate_samples)
# compute mean using only hf data
hf_mean = values[0][0].mean()
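
#%%
# The high-fidelity sample mean above ignores the low-fidelity data. Below is
# a minimal sketch of the full MFMC estimate. It assumes the sign convention
# implied by the variance formula in this package's tests (negative weights
# paired with subset-minus-full discrepancies) and the (shared subset,
# full set) structure of each entry of values:
eta = pya.get_mfmc_control_variate_weights(cov)
mfmc_mean = hf_mean
for ii in range(1, len(values)):
    # values[ii][0]: evaluations on the subset shared with model ii-1
    # values[ii][1]: evaluations on model ii's full sample set
    mfmc_mean += eta[ii-1]*(values[ii][0].mean() - values[ii][1].mean())
print('MFMC estimate of the mean', mfmc_mean)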
Example 8
.. math:: f_\alpha(\rv)=\rv^{5-\alpha}, \quad \alpha=0,\ldots,4

where each model is a function of a single uniform random variable defined on the unit interval :math:`[0,1]`.

The following code computes the variance of the MLMC estimator for different target costs, using the optimal sample allocation computed with both the exact covariance between models and a pilot-sample approximation of it.
"""

import numpy as np
import pyapprox as pya
import matplotlib.pyplot as plt
from pyapprox.tests.test_control_variate_monte_carlo import \
    PolynomialModelEnsemble
np.random.seed(1)

poly_model = PolynomialModelEnsemble()
model_ensemble = pya.ModelEnsemble(poly_model.models)
cov = poly_model.get_covariance_matrix()
target_costs = np.array([1e1, 1e2, 1e3, 1e4], dtype=int)
costs = np.asarray([10**-ii for ii in range(cov.shape[0])])
model_labels = [r'$f_0$', r'$f_1$', r'$f_2$', r'$f_3$', r'$f_4$']
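

#%%
# A hedged sketch of the exact-covariance variance computation described
# above. It assumes, as the [:2] slices used elsewhere in this document
# suggest, that pya.allocate_samples_mlmc also returns the log10 variance of
# the optimally allocated estimator as its third value:
mlmc_variances = []
for target_cost in target_costs:
    log10_var = pya.allocate_samples_mlmc(cov, costs, target_cost)[2]
    mlmc_variances.append(10**log10_var)
print('MLMC estimator variances', mlmc_variances)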


def plot_mlmc_error():
    """
    Define the plotting function here so the plot can be created later with
    other estimator error curves added to it.

    Note this is only necessary for the sphinx-gallery docs. When using the
    Jupyter notebooks sphinx-gallery creates, this function definition is
    not needed; simply call fig at the end of the next cell to regenerate
    the plot.
    """
Example 9
cov = model.get_covariance_matrix()
nhf_samples = int(1e1)

generate_samples_and_values = pya.generate_samples_and_values_acv_IS
get_cv_weights = partial(
    pya.get_approximate_control_variate_weights,
    get_discrepancy_covariances=pya.get_discrepancy_covariances_IS)
get_rsquared = partial(
    pya.get_rsquared_acv,
    get_discrepancy_covariances=pya.get_discrepancy_covariances_IS)

#%%
# First let us just use 2 models

print('Two models')
model_ensemble = pya.ModelEnsemble(model.models[:2])
nsample_ratios = [10]


def allocate_samples(cov, costs, target_cost):
    # return the fixed allocation defined above instead of optimizing it
    return [nhf_samples, nsample_ratios, None]


means1, numerical_var_reduction1, true_var_reduction1 = \
    pya.estimate_variance_reduction(
        model_ensemble, cov[:2,:2], model.generate_samples, allocate_samples,
        generate_samples_and_values, get_cv_weights, get_rsquared,
        ntrials=int(1e3), max_eval_concurrency=1)
print("Theoretical ACV variance reduction", true_var_reduction1)
print("Achieved ACV variance reduction", numerical_var_reduction1)

#%%
# Now let us use 3 models

print('Three models')
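#%%
# The three-model block was truncated in this snippet; the following sketch
# mirrors the two-model block above (same fixed allocation, one extra
# sample ratio):
model_ensemble = pya.ModelEnsemble(model.models[:3])
nsample_ratios = [10, 10]
means3, numerical_var_reduction3, true_var_reduction3 = \
    pya.estimate_variance_reduction(
        model_ensemble, cov[:3, :3], model.generate_samples, allocate_samples,
        generate_samples_and_values, get_cv_weights, get_rsquared,
        ntrials=int(1e3), max_eval_concurrency=1)
print("Theoretical ACV variance reduction", true_var_reduction3)
print("Achieved ACV variance reduction", numerical_var_reduction3)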

Example 10

.. math:: f_4(z) = 1 - \frac{M}{bh^2Y} - \left(\frac{P(1+M)}{hY}\right)^2

where :math:`z = (b,h,P,M,Y)^T`
"""
import pyapprox as pya
import numpy as np
import matplotlib.pyplot as plt
from pyapprox.tests.test_control_variate_monte_carlo import \
    TunableModelEnsemble, ShortColumnModelEnsemble, PolynomialModelEnsemble
from scipy.stats import uniform, norm, lognorm
from functools import partial
np.random.seed(1)

short_column_model = ShortColumnModelEnsemble()
model_ensemble = pya.ModelEnsemble(
    [short_column_model.m0, short_column_model.m1, short_column_model.m2])

costs = np.asarray([100, 50, 5])
target_cost = int(1e4)
idx = [0, 1, 2]
cov = short_column_model.get_covariance_matrix()[np.ix_(idx, idx)]
# generate pilot samples to estimate correlation
# npilot_samples = int(1e4)
# cov = pya.estimate_model_ensemble_covariance(
#    npilot_samples,short_column_model.generate_samples,model_ensemble)[0]

# define the sample allocation
nhf_samples, nsample_ratios = pya.allocate_samples_mlmc(
    cov, costs, target_cost)[:2]
# generate sample sets
samples, values = pya.generate_samples_and_values_mlmc(