Example 1
    def test_exponential_growth_model_bayesian_d_optimal_design(self):
        """
        See Table 2 in Dietrich Braess and Holger Dette,
        "On the number of support points of maximin and Bayesian optimal designs",
        2007. https://doi.org/10.1214/009053606000001307
        """
        num_design_pts = 50
        optimal_design_samples = {
            10: ([0.182], [1.0]),
            40: ([0.048, 0.354], [0.981, 0.019]),
            50: ([0.038, 0.318], [0.973, 0.027]),
            100: ([0.019, 0.215], [.962, 0.038]),
            200: ([0.010, 0.134], [0.959, 0.041]),
            300: ([0.006, 0.084, 0.236], [0.957, 0.037, 0.006]),
            3000: ([0.0006, 0.009, 0.055, 1.000], [0.951, 0.039, 0.006, 0.004])
        }
        lb2 = 1
        for ub2 in optimal_design_samples.keys():
            # assuming middle of parameter domain is used to find local design
            design_samples = np.linspace(0, 1, num_design_pts)
            # include the published optimal support points in the candidate
            # set so the recovered design can be compared against them
            design_samples = np.sort(
                np.unique(
                    np.concatenate(
                        [design_samples, optimal_design_samples[ub2][0]])))
            noise_multiplier = None

            def local_design_factors(p, x):
                return exponential_growth_model_grad_parameters(p, x).T
            xx2, ww2 = pya.gauss_jacobi_pts_wts_1D(40, 0, 0)
            xx2 = (xx2 + 1) / 2 * (
                ub2 - lb2) + lb2  # transform from [-1,1] to [lb2,ub2]
            parameter_samples = xx2[np.newaxis, :]

            opt_problem = AlphabetOptimalDesign('D',
                                                local_design_factors,
                                                opts=None)

            mu, res = opt_problem.solve_nonlinear_bayesian(
                parameter_samples,
                design_samples[np.newaxis, :],
                sample_weights=ww2,
                options={
                    'iprint': 0,
                    'ftol': 1e-14,
                    'disp': True,
                    'tol': 1e-12
                },
                return_full=True)
            I = np.where(mu > 1e-5)[0]
            J = np.nonzero(design_samples == np.array(
                optimal_design_samples[ub2][0])[:, None])[1]
            mu_paper = np.zeros(design_samples.shape[0])
            mu_paper[J] = optimal_design_samples[ub2][1]
            # published designs are not optimal for larger values of ub2
            if I.shape == J.shape and np.allclose(I, J):
                assert np.allclose(mu[I],
                                   optimal_design_samples[ub2][1],
                                   rtol=3e-2)
            assert (res.obj_fun(mu) <= res.obj_fun(mu_paper) + 1e-6)
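# For reference, a minimal sketch of the Bayesian D-optimality criterion this
# test is built around (the textbook definition: the negative prior-averaged
# log-determinant of the Fisher information matrix). pyapprox's res.obj_fun is
# assumed to implement something equivalent, possibly with different sign or
# scaling conventions.
import numpy as np

def bayesian_d_objective(mu, design_samples, parameter_samples, param_weights,
                         design_factors):
    #   -sum_j w_j log det( sum_i mu_i f(x_i, theta_j) f(x_i, theta_j)^T )
    # where the rows of design_factors(theta, x) are the f(x_i, theta)
    val = 0.0
    for theta, w in zip(parameter_samples.T, param_weights):
        F = design_factors(theta[:, None], design_samples)  # (npts, nparams)
        M = (F * mu[:, None]).T.dot(F)  # Fisher information of the design
        val -= w * np.linalg.slogdet(M)[1]
    return val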
Example 2
    def test_heteroscedastic_quantile_bayesian_doptimal_design(self):
        """
        Create D-optimal designs for quantile regression with heteroscedastic
        noise and compare to known analytical solutions.
        See Theorem 4.3 in Dette & Trampisch, "Optimal Designs for Quantile
        Regression Models", https://doi.org/10.1080/01621459.2012.695665
        """
        poly_degree = 2
        num_design_pts = 100
        x_lb, x_ub = 1e-3, 2000
        design_samples = np.linspace(x_lb, x_ub, num_design_pts)
        # append the non-trivial support point of the known optimal design so
        # the asserts below can match it exactly
        design_samples = np.sort(np.concatenate([design_samples, [754.4]]))
        n = 1  # possible values: -2, -1, 0, 1

        def link_function(z):
            return 1 / z**n

        def noise_multiplier(p, x):
            return link_function(michaelis_menten_model(p, x))
        def local_design_factors(p, x):
            return michaelis_menten_model_grad_parameters(p, x).T
        xx1 = np.array([10])  # theta_1 does not affect the optimum

        p_lb, p_ub = 100, 2000
        xx2, ww2 = pya.gauss_jacobi_pts_wts_1D(20, 0, 0)
        # transform from [-1,1] to [p_lb,p_ub]
        xx2 = (xx2 + 1) / 2 * (p_ub - p_lb) + p_lb
        parameter_samples = cartesian_product([xx1, xx2])

        opt_problem = AlphabetOptimalDesign('D',
                                            local_design_factors,
                                            noise_multiplier=noise_multiplier,
                                            regression_type='quantile')

        mu, res = opt_problem.solve_nonlinear_bayesian(
            parameter_samples,
            design_samples[np.newaxis, :],
            sample_weights=ww2,
            options={
                'iprint': 0,
                'ftol': 1e-8,
                'disp': True
            },
            return_full=True)

        # vals = []
        # for ii in range(design_samples.shape[0]):
        #     xopt = np.zeros(design_samples.shape[0])+1e-8;
        #     #xopt[-1]=.5; xopt[ii]=.5
        #     vals.append(res.obj_fun(xopt))
        #plt.plot(design_samples,vals); plt.show()

        I = np.where(mu > 1e-5)[0]
        assert np.allclose(design_samples[I], [754.4, x_ub])
        assert np.allclose(mu[I], [0.5, 0.5])
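# For context, a minimal sketch of the assumed Michaelis-Menten model and the
# parameter Jacobian that local_design_factors transposes above. The actual
# pyapprox implementations may differ in signature and broadcasting.
import numpy as np

def michaelis_menten_model_sketch(p, x):
    # assumed form f(theta, x) = theta_1 * x / (theta_2 + x)
    theta_1, theta_2 = p[0], p[1]
    return theta_1 * x / (theta_2 + x)

def michaelis_menten_grad_parameters_sketch(p, x):
    # row 0: df/dtheta_1 = x / (theta_2 + x)
    # row 1: df/dtheta_2 = -theta_1 * x / (theta_2 + x)**2
    theta_1, theta_2 = p[0], p[1]
    denom = theta_2 + x
    return np.vstack([x / denom, -theta_1 * x / denom**2])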
Example 3
def univariate_quadrature_rule(n):
    # Gauss-Legendre rule (Jacobi with alpha=beta=0): the points on [-1, 1]
    # are scaled to [-2, 2]; the weights are returned unchanged
    x, w = pya.gauss_jacobi_pts_wts_1D(n, 0, 0)
    x *= 2
    return x, w
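# Usage sketch for the rule above. It assumes (as the Bayesian averaging in
# the tests above suggests) that pya.gauss_jacobi_pts_wts_1D returns
# probability weights summing to one, so the scaled rule computes means with
# respect to the uniform density on [-2, 2].
import numpy as np

x, w = univariate_quadrature_rule(10)
# the mean of x**2 under the uniform density on [-2, 2] is 4/3
print(w.dot(x**2))  # approximately 1.3333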