Example #1
    def test_homoscedastic_ioptimality_criterion(self):
        poly_degree = 10
        num_design_pts = 101
        num_pred_pts = 51
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        # TODO check if design factors may have to be a subset of pred_factors
        #pred_factors=univariate_monomial_basis_matrix(poly_degree,pred_samples)
        #assert num_design_pts<=pred_factors.shape[0]
        #design_factors = pred_factors[:num_design_pts,:]
        design_samples = np.linspace(-1, 1, num_design_pts)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        ioptimality_criterion_wrapper = partial(ioptimality_criterion,
                                                homog_outer_prods,
                                                design_factors, pred_factors)
        diffs = check_derivative(ioptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-5, diffs

        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        M1 = homog_outer_prods.dot(mu)
        u = np.linalg.solve(M1, pred_factors.T)
        assert np.allclose(
            np.diag(pred_factors.dot(u)).mean(),
            ioptimality_criterion_wrapper(mu, return_grad=False))
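The final assertion above checks the standard identity that the I-criterion at a design measure mu equals the mean prediction variance over the prediction points. A minimal standalone numpy sketch of that identity (the helper name is hypothetical, not part of pyapprox):

import numpy as np

# mean_k f_k^T M(mu)^{-1} f_k, the quantity the assertion above reduces to
def mean_prediction_variance(pred_factors, M1):
    u = np.linalg.solve(M1, pred_factors.T)      # M(mu)^{-1} F_pred^T
    return np.diag(pred_factors.dot(u)).mean()   # mean of diag(F M^{-1} F^T)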
Example #2
    def test_homoscedastic_least_squares_doptimal_design(self):
        """
        Create D-optimal designs, for least squares regression with 
        homoscedastic noise, and compare to known analytical solutions.
        See Section 5 of Wenjie Z, Computing Optimal Designs for Regression 
        Modelsvia Convex Programming, Ph.D. Thesis, 2012
        """
        poly_degree = 2
        num_design_pts = 7
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)

        opt_problem = AlphabetOptimalDesign('D', design_factors)
        mu = opt_problem.solve({'iprint': 1, 'ftol': 1e-8})
        I = np.where(mu > 1e-5)[0]
        assert np.allclose(I, [0, 3, 6])
        assert np.allclose(np.ones(3) / 3, mu[I])

        # See J.E. Boon, Generating Exact D-Optimal Designs for Polynomial
        # Models, 2007, for how to derive the analytical solution for this
        # test case

        poly_degree = 3
        num_design_pts = 30
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        opt_problem = AlphabetOptimalDesign('D', design_factors)
        mu = opt_problem.solve({'iprint': 1, 'ftol': 1e-8})
        I = np.where(mu > 1e-5)[0]
        assert np.allclose(I, [0, 8, 21, 29])
        assert np.allclose(0.25 * np.ones(4), mu[I])
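The support points asserted above follow a classical result: the D-optimal design for degree-d polynomial regression on [-1, 1] puts equal weight 1/(d+1) on the d+1 zeros of (1-x^2)*P_d'(x), where P_d is the degree-d Legendre polynomial. A small numpy sketch recovering the degree-3 support (approximately -1, -0.447, 0.447, 1, which the test matches to grid indices [0, 8, 21, 29]):

import numpy as np

# interior support points are the roots of P_d'; the endpoints are -1 and 1
d = 3
interior = np.polynomial.legendre.Legendre.basis(d).deriv().roots()
support = np.concatenate([[-1.0], interior, [1.0]])
print(support)  # approx [-1., -0.4472, 0.4472, 1.]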
Example #3
    def test_krawtchouk_binomial(self):
        degree = 4
        num_trials = 10
        prob_success = 0.5
        ab = krawtchouk_recurrence(degree + 1, num_trials, prob_success)
        x, w = gauss_quadrature(ab, degree + 1)

        probability_mesh = np.arange(0, num_trials + 1, dtype=float)
        probability_masses = binom.pmf(probability_mesh, num_trials,
                                       prob_success)

        basis_mat = evaluate_orthonormal_polynomial_1d(probability_mesh,
                                                       degree, ab)
        assert np.allclose(
            (basis_mat * probability_masses[:, None]).T.dot(basis_mat),
            np.eye(basis_mat.shape[1]))

        coef = np.random.uniform(-1, 1, (degree + 1))
        basis_matrix_at_pm = univariate_monomial_basis_matrix(
            degree, probability_mesh)
        vals_at_pm = basis_matrix_at_pm.dot(coef)
        basis_matrix_at_gauss = univariate_monomial_basis_matrix(degree, x)
        vals_at_gauss = basis_matrix_at_gauss.dot(coef)

        true_mean = vals_at_pm.dot(probability_masses)
        quadrature_mean = vals_at_gauss.dot(w)
        # print (true_mean, quadrature_mean)
        assert np.allclose(true_mean, quadrature_mean)
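Krawtchouk polynomials are orthonormal with respect to the binomial probability measure, which is why the Gauss rule above reproduces expectations under binom.pmf exactly for polynomials within the rule's degree of exactness. A quick sanity sketch of the underlying measure itself:

import numpy as np
from scipy.stats import binom

num_trials, prob_success = 10, 0.5
mesh = np.arange(num_trials + 1, dtype=float)
masses = binom.pmf(mesh, num_trials, prob_success)
assert np.isclose(masses.sum(), 1.0)                           # pmf sums to one
assert np.isclose(mesh.dot(masses), num_trials*prob_success)   # E[X] = n*p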
Example #4
    def test_hetroscedastic_goptimality_criterion(self):
        """
        Test homoscedastic and hetroscedastic API produce same value
        when noise is homoscedastic
        """
        poly_degree = 10
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = design_samples**2 + 1
        pred_samples = np.random.uniform(-1, 1, 51)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        goptimality_criterion_wrapper = partial(
            goptimality_criterion,
            homog_outer_prods,
            design_factors,
            pred_factors,
            noise_multiplier=noise_multiplier)

        # Test heteroscedastic API gradients are correct
        diffs = check_derivative(goptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-5, diffs

        # Test quantile regression gradients
        goptimality_criterion_wrapper = partial(
            goptimality_criterion,
            homog_outer_prods,
            design_factors,
            pred_factors,
            noise_multiplier=noise_multiplier,
            regression_type='quantile')
        diffs = check_derivative(goptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-5, diffs

        # Test homoscedastic and heteroscedastic API produce same value
        # when noise is homoscedastic
        pp = np.ones((num_design_pts, 1)) / num_design_pts
        noise_multiplier = noise_multiplier * 0 + 1
        assert np.allclose(
            goptimality_criterion(homog_outer_prods,
                                  design_factors,
                                  pred_factors,
                                  pp,
                                  return_grad=False),
            goptimality_criterion(homog_outer_prods,
                                  design_factors,
                                  pred_factors,
                                  pp,
                                  return_grad=False,
                                  noise_multiplier=noise_multiplier))
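The final check above relies on the heteroscedastic "sandwich" matrices collapsing when the noise multiplier is identically one: M0 = sum_i mu_i eta_i^2 f_i f_i^T then equals M1 = sum_i mu_i f_i f_i^T, so M1^{-1} M0 M1^{-1} reduces to the homoscedastic M1^{-1}. A minimal numpy sketch of that collapse with toy data:

import numpy as np

F = np.vander(np.linspace(-1, 1, 5), 3, increasing=True)  # toy design factors
mu = np.full(5, 0.2)                                       # uniform design measure
eta = np.ones(5)                                           # homoscedastic noise
M1 = (F * mu[:, None]).T.dot(F)
M0 = (F * (mu*eta**2)[:, None]).T.dot(F)
assert np.allclose(M0, M1)   # so M1^{-1} M0 M1^{-1} == M1^{-1}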
Example #5
    def test_homoscedastic_least_squares_roptimal_design(self):
        """
        Check R (beta=0) and I optimal designs are the same
        """
        poly_degree = 1
        num_design_pts = 2
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        num_pred_pts = 3
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)

        opts = {
            'beta': 0,
            'pred_factors': pred_factors,
            'pred_samples': pred_samples[np.newaxis, :],
            'nonsmooth': False
        }

        opt_problem = AlphabetOptimalDesign('R', design_factors, opts=opts)
        solver_opts = {
            'disp': True,
            'iprint': 0,
            'ftol': 1e-12,
            'maxiter': 2000
        }
        #solver_opts = {'solver':'ipopt','print_level':0,
        #              'tol':1e-8,'acceptable_obj_change_tol':1e-8,
        #               'derivative_test':'first-order','maxiter':1000}
        #solver_opts.update({'constraint_jacobianstructure':partial(get_r_oed_jacobian_structure,num_pred_pts,num_design_pts)})
        mu_R, res = opt_problem.solve(solver_opts, return_full=True)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        variance = compute_prediction_variance(mu_R, pred_factors,
                                               homog_outer_prods)
        assert (res.x[0] <= variance.min())

        del opts['beta']
        if 'constraint_jacobianstructure' in solver_opts:
            del solver_opts['constraint_jacobianstructure']
        opt_problem = AlphabetOptimalDesign('I', design_factors, opts=opts)
        mu_I = opt_problem.solve(solver_opts)
        variance = compute_prediction_variance(mu_I, pred_factors,
                                               homog_outer_prods)
        assert np.allclose(mu_R, mu_I)
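Why beta=0 makes R- and I-optimality coincide: the R-criterion is the CVaR at level beta of the pointwise prediction variance, and CVaR at level zero is the plain mean over the prediction points, which is exactly the I-criterion. A rough discrete sketch (ignoring the fractional-weight correction in the exact discrete CVaR):

import numpy as np

def discrete_cvar(values, beta):
    # average of the worst (1 - beta) fraction of the values
    sorted_vals = np.sort(values)
    start = int(np.ceil(beta*len(values)))
    return sorted_vals[start:].mean()

variances = np.array([0.5, 1.0, 2.0, 4.0])
assert np.isclose(discrete_cvar(variances, 0.0), variances.mean())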
Example #6
    def test_homoscedastic_doptimality_criterion(self):
        poly_degree = 3
        num_design_pts = 11
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        doptimality_criterion_wrapper = partial(doptimality_criterion,
                                                homog_outer_prods,
                                                design_factors)
        diffs = check_derivative(doptimality_criterion_wrapper, num_design_pts)
        # print(diffs)
        assert diffs.min() < 5e-5, diffs

        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        M1 = homog_outer_prods.dot(mu)
        # print(np.linalg.det(M1),
        #       doptimality_criterion_wrapper(mu, return_grad=False))
        assert np.allclose(
            np.log(np.linalg.det(np.linalg.inv(M1))),
            doptimality_criterion_wrapper(mu, return_grad=False))

        def jac(x):
            return doptimality_criterion(
                homog_outer_prods, design_factors, x)[1]

        def hess_matvec(x, p):
            return doptimality_criterion(
                homog_outer_prods, design_factors, x,
                return_hessian=True)[2].dot(p)

        pya.check_hessian(jac, hess_matvec, mu[:, np.newaxis])
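The gradient exercised by check_derivative and check_hessian above has a closed form: for J(mu) = log det M(mu)^{-1} with M(mu) = sum_i mu_i f_i f_i^T, dJ/dmu_i = -f_i^T M(mu)^{-1} f_i. A self-contained finite-difference spot check of that identity on toy data:

import numpy as np

F = np.vander(np.linspace(-1, 1, 6), 3, increasing=True)
mu = np.full(6, 1/6)
M = (F * mu[:, None]).T.dot(F)
grad = -np.einsum('ij,jk,ik->i', F, np.linalg.inv(M), F)  # -f_i^T M^{-1} f_i
eps = 1e-7
mu_pert = mu.copy()
mu_pert[0] += eps
M_pert = (F * mu_pert[:, None]).T.dot(F)
fd = (np.log(np.linalg.det(np.linalg.inv(M_pert))) -
      np.log(np.linalg.det(np.linalg.inv(M))))/eps
assert np.isclose(grad[0], fd, rtol=1e-4)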
Example #7
    def test_hetroscedastic_aoptimality_criterion(self):
        poly_degree = 10
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = design_samples**2
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        hetero_outer_prods = compute_heteroscedastic_outer_products(
            design_factors, noise_multiplier)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        aoptimality_criterion_wrapper = partial(
            aoptimality_criterion, homog_outer_prods, design_factors,
            hetero_outer_prods=hetero_outer_prods,
            noise_multiplier=noise_multiplier)
        diffs = check_derivative(aoptimality_criterion_wrapper, num_design_pts)
        # print(diffs)

        assert diffs[np.isfinite(diffs)].min() < 4e-7, diffs

        # Test homoscedastic and heteroscedastic API produce same value
        # when noise is homoscedastic
        pp = np.random.uniform(0, 1, (num_design_pts, 1))
        assert np.allclose(
            aoptimality_criterion_wrapper(pp, return_grad=False),
            aoptimality_criterion(
                homog_outer_prods, design_factors,
                pp, return_grad=False, hetero_outer_prods=hetero_outer_prods,
                noise_multiplier=noise_multiplier*0+1))
Example #8
    def test_homoscedastic_least_squares_goptimal_design(self):
        """
        Create G-optimal design
        """
        poly_degree = 2
        num_design_pts = 7
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        #pred_factors = design_factors
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, np.linspace(-1, 1, num_design_pts))

        opts = {'pred_factors': pred_factors}
        opt_problem = AlphabetOptimalDesign('G', design_factors, opts=opts)
        mu = opt_problem.solve({'iprint': 1, 'ftol': 1e-8})
        I = np.where(mu > 1e-5)[0]
        assert np.allclose(I, [0, 3, 6])
        assert np.allclose(np.ones(3) / 3, mu[I])

        # Check that G-optimality gives the same design as D-optimality.
        # This holds by the Kiefer-Wolfowitz equivalence theorem.
        opt_problem = AlphabetOptimalDesign('D', design_factors)
        mu_d = opt_problem.solve({'iprint': 1, 'ftol': 1e-8})
        assert np.allclose(mu, mu_d)

        # test high-level api for D optimality
        selected_pts, mu_d = optimal_experimental_design(
            design_samples[np.newaxis, :],
            design_factors,
            'D',
            regresion_type='lstsq',
            noise_multiplier=None)
        assert np.allclose(selected_pts, design_samples[I])
        assert np.allclose(mu_d, np.round(mu[I] * num_design_pts))

        # test high-level api for G optimality
        selected_pts, mu_g = optimal_experimental_design(
            design_samples[np.newaxis, :],
            design_factors,
            'G',
            regresion_type='lstsq',
            noise_multiplier=None,
            pred_factors=pred_factors)
        assert np.allclose(selected_pts, design_samples[I])
        assert np.allclose(mu_g, np.round(mu[I] * num_design_pts))
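The G = D coincidence asserted above is the Kiefer-Wolfowitz equivalence theorem: a design measure is D-optimal iff it is G-optimal, and at the optimum the maximum prediction variance max_x f(x)^T M(mu)^{-1} f(x) equals the number of basis terms. A standalone numpy check for the degree-2 design found above (support {-1, 0, 1}, equal weights, three basis terms):

import numpy as np

support = np.array([-1.0, 0.0, 1.0])
F = np.vander(support, 3, increasing=True)        # monomials 1, x, x^2
M = F.T.dot(F)/3                                  # equal weights 1/3
grid = np.vander(np.linspace(-1, 1, 1001), 3, increasing=True)
variance = np.einsum('ij,jk,ik->i', grid, np.linalg.inv(M), grid)
assert np.isclose(variance.max(), 3.0)            # equals num basis terms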
Example #9
    def test_prediction_variance(self):
        poly_degree = 10
        num_design_pts = 101
        num_pred_pts = 51
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        design_samples = np.linspace(-1, 1, num_design_pts)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        design_prob_measure = np.ones(num_design_pts) / num_design_pts
        # homoscedastic error
        variance = compute_prediction_variance(design_prob_measure,
                                               pred_factors, homog_outer_prods)
        M1 = homog_outer_prods.dot(design_prob_measure)
        variance1 = np.diag(
            pred_factors.dot(np.linalg.inv(M1).dot(pred_factors.T)))
        assert np.allclose(variance, variance1)

        # heteroscedastic error lstsq
        noise_multiplier = design_samples**2 + 1
        variance = compute_prediction_variance(design_prob_measure,
                                               pred_factors, homog_outer_prods,
                                               noise_multiplier)
        M1 = homog_outer_prods.dot(design_prob_measure)
        M0 = homog_outer_prods.dot(design_prob_measure * noise_multiplier**2)
        variance1 = np.diag(
            pred_factors.dot(
                np.linalg.inv(M1).dot(
                    M0.dot(np.linalg.inv(M1)).dot(pred_factors.T))))
        assert np.allclose(variance, variance1)

        # heteroscedastic error quantile
        noise_multiplier = design_samples**2 + 1
        variance = compute_prediction_variance(design_prob_measure,
                                               pred_factors, homog_outer_prods,
                                               noise_multiplier, 'quantile')
        M0 = homog_outer_prods.dot(design_prob_measure)
        M1 = homog_outer_prods.dot(design_prob_measure / noise_multiplier)
        variance1 = np.diag(
            pred_factors.dot(
                np.linalg.inv(M1).dot(
                    M0.dot(np.linalg.inv(M1)).dot(pred_factors.T))))
        assert np.allclose(variance, variance1)
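Both heteroscedastic branches above use the same sandwich form and differ only in where the noise multiplier eta enters: least squares uses M1 = sum_i mu_i f_i f_i^T with M0 = sum_i mu_i eta_i^2 f_i f_i^T, while quantile regression uses M1 = sum_i (mu_i/eta_i) f_i f_i^T with M0 = sum_i mu_i f_i f_i^T. A small helper sketch (hypothetical name) that evaluates diag(F M1^{-1} M0 M1^{-1} F^T) without forming explicit inverses:

import numpy as np

def sandwich_prediction_variance(pred_factors, M0, M1):
    inner = np.linalg.solve(M1, M0.dot(np.linalg.solve(M1, pred_factors.T)))
    return np.einsum('ij,ji->i', pred_factors, inner)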
Example #10
    def test_r_oed_objective_and_constraint_wrappers(self):
        poly_degree = 10
        num_design_pts = 101
        num_pred_pts = 51
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        design_samples = np.linspace(-1, 1, num_design_pts)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        goptimality_criterion_wrapper = partial(goptimality_criterion,
                                                homog_outer_prods,
                                                design_factors, pred_factors)
        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        obj, jac = goptimality_criterion_wrapper(mu)

        beta = 0.75
        pred_weights = np.ones(num_pred_pts) / num_pred_pts
        r_oed_objective_wrapper = partial(r_oed_objective, beta, pred_weights)
        r_oed_jac_wrapper = partial(r_oed_objective_jacobian, beta,
                                    pred_weights)
        x0 = np.concatenate([np.ones(num_design_pts + 1), mu])[:, np.newaxis]
        diffs = pya.check_gradients(r_oed_objective_wrapper, r_oed_jac_wrapper,
                                    x0)
        assert diffs.min() < 6e-5, diffs

        r_oed_constraint_wrapper = partial(
            r_oed_constraint_objective, num_design_pts,
            lambda x: goptimality_criterion_wrapper(x)[0])
        r_oed_constraint_jac_wrapper = partial(
            r_oed_constraint_jacobian, num_design_pts,
            lambda x: goptimality_criterion_wrapper(x)[1])
        x0 = np.concatenate([np.ones(num_pred_pts + 1), mu])[:, np.newaxis]
        diffs = pya.check_gradients(r_oed_constraint_wrapper,
                                    r_oed_constraint_jac_wrapper, x0)
        assert diffs.min() < 6e-5, diffs
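The objective wrapped above is assumed to be the Rockafellar-Uryasev epigraph form of CVaR: the decision vector stacks an auxiliary scalar t, one slack per prediction point, and the design weights mu, and the smooth objective only touches t and the slacks while the constraints tie the slacks to the G-criterion. A sketch of that form under those assumptions (the exact variable layout in pyapprox's r_oed_objective may differ):

import numpy as np

def cvar_epigraph_objective(beta, pred_weights, x):
    # x = [t, s_1..s_K, mu_1..mu_N]; value = t + (1/(1-beta)) * sum_k w_k s_k
    num_pred_pts = pred_weights.shape[0]
    t, slacks = x[0], x[1:1 + num_pred_pts]
    return t + pred_weights.dot(slacks)/(1 - beta)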
Example #11
    def test_homoscedastic_roptimality_criterion(self):
        beta = 0.5  # when beta=0 we get I-optimality
        poly_degree = 10
        num_design_pts = 101
        num_pred_pts = 51
        pred_samples = np.random.uniform(-1, 1, num_pred_pts)
        # TODO check if design factors may have to be a subset of pred_factors
        #pred_factors=univariate_monomial_basis_matrix(poly_degree,pred_samples)
        #assert num_design_pts<=pred_factors.shape[0]
        #design_factors = pred_factors[:num_design_pts,:]
        design_samples = np.linspace(-1, 1, num_design_pts)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        roptimality_criterion_wrapper = partial(
            roptimality_criterion, beta, homog_outer_prods, design_factors,
            pred_factors)
        diffs = check_derivative(roptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-7, diffs
Example #12
    def test_hetroscedastic_ioptimality_criterion(self):
        """
        Test that the homoscedastic and heteroscedastic APIs produce the same
        value when the noise is homoscedastic.
        """
        poly_degree = 10
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = design_samples**2 + 1
        pred_samples = np.random.uniform(-1, 1, 51)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        hetero_outer_prods = compute_heteroscedastic_outer_products(
            design_factors, noise_multiplier)
        ioptimality_criterion_wrapper = partial(
            ioptimality_criterion, homog_outer_prods, design_factors,
            pred_factors, hetero_outer_prods=hetero_outer_prods,
            noise_multiplier=noise_multiplier)

        # Test heteroscedastic API gradients are correct
        diffs = check_derivative(ioptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-7, diffs

        # Test homoscedastic and heteroscedastic API produce same value
        # when noise is homoscedastic
        pp = np.random.uniform(0, 1, (num_design_pts, 1))
        assert np.allclose(
            ioptimality_criterion_wrapper(pp, return_grad=False),
            ioptimality_criterion(
                homog_outer_prods, design_factors, pred_factors,
                pp, return_grad=False, hetero_outer_prods=hetero_outer_prods,
                noise_multiplier=noise_multiplier*0+1))

        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        M1 = homog_outer_prods.dot(mu)
        M0 = hetero_outer_prods.dot(mu)
        u = np.linalg.solve(M1, pred_factors.T)
        assert np.allclose(
            np.diag(u.T.dot(M0).dot(u)).mean(),
            ioptimality_criterion_wrapper(mu, return_grad=False))
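The last assertion above is the heteroscedastic I-criterion identity I(mu) = mean_k f_k^T M1^{-1} M0 M1^{-1} f_k, written via u = M1^{-1} F_pred^T. When eta = 1 it reduces to the homoscedastic identity from Example #1, since M0 = M1. A standalone sketch (hypothetical helper name):

import numpy as np

def hetero_icriterion_value(pred_factors, M0, M1):
    u = np.linalg.solve(M1, pred_factors.T)   # M1^{-1} F_pred^T
    return np.diag(u.T.dot(M0).dot(u)).mean()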
Example #13
    def test_hetroscedastic_foptimality_criterion(self):
        poly_degree = 10
        beta = 0.5
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = design_samples**2 + 1
        pred_samples = np.random.uniform(-1, 1, 51)
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        pred_factors = univariate_monomial_basis_matrix(
            poly_degree, pred_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        hetero_outer_prods = compute_heteroscedastic_outer_products(
            design_factors, noise_multiplier)
        roptimality_criterion_wrapper = partial(
            roptimality_criterion, beta, homog_outer_prods, design_factors,
            pred_factors, hetero_outer_prods=hetero_outer_prods,
            noise_multiplier=noise_multiplier)

        # Test heteroscedastic API gradients are correct
        diffs = check_derivative(roptimality_criterion_wrapper, num_design_pts)
        assert diffs.min() < 6e-7, diffs
Example #14
    def test_homoscedastic_coptimality_criterion(self):
        poly_degree = 10
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        coptimality_criterion_wrapper = partial(
            coptimality_criterion, homog_outer_prods, design_factors)
        diffs = check_derivative(coptimality_criterion_wrapper, num_design_pts)
        # print(diffs)
        assert diffs.min() < 4e-7, diffs
Example #15
    def test_hetroscedastic_doptimality_criterion(self):
        poly_degree = 10
        num_design_pts = 51
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = design_samples**2 + 2
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        doptimality_criterion_wrapper = partial(
            doptimality_criterion,
            homog_outer_prods,
            design_factors,
            noise_multiplier=noise_multiplier)
        diffs = check_derivative(doptimality_criterion_wrapper, num_design_pts)
        # print(diffs)

        assert diffs[np.isfinite(diffs)].min() < 2e-4, diffs

        # Test quantile regression gradients
        doptimality_criterion_wrapper = partial(
            doptimality_criterion,
            homog_outer_prods,
            design_factors,
            noise_multiplier=noise_multiplier,
            regression_type='quantile')
        diffs = check_derivative(doptimality_criterion_wrapper,
                                 num_design_pts,
                                 rel=False)
        assert diffs.min() < 6e-5, diffs

        # Test homoscedastic and heteroscedastic API produce same value
        # when noise is homoscedastic
        pp = np.ones((num_design_pts, 1)) / num_design_pts
        noise_multiplier = noise_multiplier * 0 + 1
        assert np.allclose(
            doptimality_criterion(homog_outer_prods,
                                  design_factors,
                                  pp,
                                  return_grad=False),
            doptimality_criterion(homog_outer_prods,
                                  design_factors,
                                  pp,
                                  return_grad=False,
                                  noise_multiplier=noise_multiplier))

    def test_homoscedastic_aoptimality_criterion(self):
        poly_degree = 10
        num_design_pts = 101
        design_samples = np.linspace(-1, 1, num_design_pts)
        noise_multiplier = None
        design_factors = univariate_monomial_basis_matrix(
            poly_degree, design_samples)
        homog_outer_prods = compute_homoscedastic_outer_products(
            design_factors)
        aoptimality_criterion_wrapper = partial(
            aoptimality_criterion, homog_outer_prods, design_factors)
        diffs = check_derivative(aoptimality_criterion_wrapper, num_design_pts)
        # print(diffs)
        assert diffs.min() < 5e-7, diffs

        mu = np.random.uniform(0, 1, (num_design_pts))
        mu /= mu.sum()
        M1 = homog_outer_prods.dot(mu)
        assert np.allclose(
            np.trace(np.linalg.inv(M1)),
            aoptimality_criterion_wrapper(mu, return_grad=False))
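As with the D-criterion, the gradient exercised by check_derivative above has a closed form: for J(mu) = tr(M(mu)^{-1}), dJ/dmu_i = -f_i^T M^{-2} f_i = -||M^{-1} f_i||^2. A self-contained finite-difference spot check on toy data:

import numpy as np

F = np.vander(np.linspace(-1, 1, 6), 3, increasing=True)
mu = np.full(6, 1/6)
M = (F * mu[:, None]).T.dot(F)
grad = -np.sum(np.linalg.solve(M, F.T)**2, axis=0)  # -||M^{-1} f_i||^2
eps = 1e-7
mu_pert = mu.copy()
mu_pert[0] += eps
M_pert = (F * mu_pert[:, None]).T.dot(F)
fd = (np.trace(np.linalg.inv(M_pert)) - np.trace(np.linalg.inv(M)))/eps
assert np.isclose(grad[0], fd, rtol=1e-4)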

    def test_evaluate_active_subspace_density_1d_moments(self):
        num_vars = 2
        num_active_vars = 1
        degree = 3
        W, W1, W2 = get_random_active_subspace_eigenvecs(
            num_vars, num_active_vars)
        vertices = get_zonotope_vertices_and_bounds(W1)[0]

        def density_fn(x):
            return np.ones(x.shape[1])*0.25

        indices = compute_hyperbolic_indices(num_active_vars, degree, 1.0)

        x1d, w1d = np.polynomial.legendre.leggauss(100)
        w1d /= 2

        as_density_vals = evaluate_active_subspace_density_1d(
            W, density_fn, plot_steps=False, points_for_eval=x1d)
        basis_matrix = univariate_monomial_basis_matrix(degree, x1d)

        assert np.allclose(
            moments_of_active_subspace(
                W1.T, indices, monomial_mean_uniform_variables),
            np.dot(basis_matrix.T, w1d))