Example #1
    def test_predict_output(self):
        d, n = (3, 10)
        sx = LHS(
            xlimits=np.repeat(np.atleast_2d([0.0, 1.0]), d, axis=0),
            criterion="m",
            random_state=42,
        )
        x = sx(n)
        sy = LHS(
            xlimits=np.repeat(np.atleast_2d([0.0, 1.0]), 1, axis=0),
            criterion="m",
            random_state=42,
        )
        y = sy(n)
        y = y.flatten()

        kriging = MGP(n_comp=2)
        kriging.set_training_values(x, y)
        kriging.train()

        # Inputs whose size does not match the training dimension must be rejected
        x_fail_1 = np.asarray([0, 0, 0, 0])
        x_fail_2 = np.asarray([0])

        self.assertRaises(ValueError, lambda: kriging.predict_values(x_fail_1))
        self.assertRaises(ValueError, lambda: kriging.predict_values(x_fail_2))

        self.assertRaises(ValueError, lambda: kriging.predict_variances(x_fail_1))
        self.assertRaises(ValueError, lambda: kriging.predict_variances(x_fail_2))

        x_1 = np.atleast_2d([0, 0, 0])

        # both=True also returns the variance without hyperparameter
        # uncertainty; the first entry matches the default output
        var = kriging.predict_variances(x_1)
        var_1 = kriging.predict_variances(x_1, True)
        self.assertEqual(var, var_1[0])
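These listings are unittest methods, so they assume imports and a TestCase class that the extract omits. A minimal sketch of that scaffolding, assuming SMT is installed (the class name TestMGP is hypothetical, not from the listing):

    import unittest

    import numpy as np

    from smt.sampling_methods import LHS
    from smt.surrogate_models import KRG, MGP


    class TestMGP(unittest.TestCase):  # hypothetical name
        pass  # the test methods shown in these examples would live here


    if __name__ == "__main__":
        unittest.main()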
Example #2
    def test_likelihood_hessian(self):
        # For every kernel and every polynomial trend
        for corr_str in ["abs_exp", "squar_exp", "act_exp", "matern32", "matern52"]:
            for poly_str in ["constant", "linear", "quadratic"]:
                if corr_str == "act_exp":
                    kr = MGP(print_global=False)
                    theta = self.random.rand(4)
                else:
                    kr = KRG(print_global=False)
                    theta = self.theta
                kr.options["poly"] = poly_str
                kr.options["corr"] = corr_str
                kr.set_training_values(self.X, self.y)
                kr.train()
                grad_red, dpar = kr._reduced_likelihood_gradient(theta)

                hess, hess_ij, _ = kr._reduced_likelihood_hessian(theta)
                Hess = np.zeros((theta.shape[0], theta.shape[0]))
                Hess[hess_ij[:, 0], hess_ij[:, 1]] = hess[:, 0]
                Hess[hess_ij[:, 1], hess_ij[:, 0]] = hess[:, 0]

                grad_norm_all = []
                diff_norm_all = []
                ind_theta = []
                # Forward-difference the gradient w.r.t. each theta_j and
                # compare against the analytic Hessian column
                for j, omega_j in enumerate(theta):
                    eps_omega = theta.copy()
                    eps_omega[j] += self.eps

                    grad_red_eps, _ = kr._reduced_likelihood_gradient(eps_omega)
                    for i, theta_i in enumerate(theta):
                        hess_eps = (grad_red_eps[i] - grad_red[i]) / self.eps

                        grad_norm_all.append(
                            np.linalg.norm(Hess[i, j]) / np.linalg.norm(Hess))
                        diff_norm_all.append(
                            np.linalg.norm(hess_eps) / np.linalg.norm(Hess))
                        ind_theta.append(r"$x_%d,x_%d$" % (j, i))
                self.assert_error(
                    np.array(grad_norm_all),
                    np.array(diff_norm_all),
                    atol=1e-5,
                    rtol=1e-3,
                )  # from utils/smt_test_case.py
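Examples #2 and #3 reference fixtures the extract omits: self.X, self.y, self.theta, self.eps, self.random, plus assert_error from utils/smt_test_case.py. A plausible setUp, with all sizes and values chosen here for illustration only:

    import unittest

    import numpy as np


    class TestLikelihood(unittest.TestCase):  # hypothetical name
        def setUp(self):
            # Illustrative fixtures; the real ones live in the SMT test suite
            self.random = np.random.RandomState(42)
            self.X = self.random.rand(20, 2)      # training inputs
            self.y = np.sum(self.X, axis=1) ** 2  # training outputs
            self.theta = self.random.rand(2)      # hyperparameters to probe
            self.eps = 1e-6                       # finite-difference step size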
Example #3
    def test_likelihood_derivatives(self):
        # For every kernel and every polynomial trend
        for corr_str in ["abs_exp", "squar_exp", "act_exp", "matern32", "matern52"]:
            for poly_str in ["constant", "linear", "quadratic"]:
                if corr_str == "act_exp":
                    kr = MGP(print_global=False)
                    theta = self.random.rand(4)
                else:
                    kr = KRG(print_global=False)
                    theta = self.theta
                kr.options["poly"] = poly_str
                kr.options["corr"] = corr_str
                kr.set_training_values(self.X, self.y)
                kr.train()

                grad_red, dpar = kr._reduced_likelihood_gradient(theta)
                red, par = kr._reduced_likelihood_function(theta)

                grad_norm_all = []
                diff_norm_all = []
                ind_theta = []
                for i, theta_i in enumerate(theta):
                    eps_theta = theta.copy()
                    eps_theta[i] += self.eps

                    red_dk, par_dk = kr._reduced_likelihood_function(eps_theta)
                    dred_dk = (red_dk - red) / self.eps

                    grad_norm_all.append(grad_red[i])
                    diff_norm_all.append(float(dred_dk))
                    ind_theta.append(r"$x_%d$" % i)

                grad_norm_all = np.atleast_2d(grad_norm_all)
                diff_norm_all = np.atleast_2d(diff_norm_all).T
                self.assert_error(grad_norm_all,
                                  diff_norm_all,
                                  atol=1e-5,
                                  rtol=1e-3)  # from utils/smt_test_case.py
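Both tests apply the same forward-difference pattern: perturb one component of theta by eps and compare the resulting slope with the analytic derivative. A self-contained toy version of that check, with f and grad_f standing in for kr._reduced_likelihood_function and kr._reduced_likelihood_gradient:

    import numpy as np

    def f(theta):
        return float(theta @ theta)  # toy objective with known gradient 2 * theta

    def grad_f(theta):
        return 2.0 * theta

    theta = np.array([0.3, 0.7])
    eps = 1e-6
    for i in range(theta.size):
        step = np.zeros_like(theta)
        step[i] = eps
        fd = (f(theta + step) - f(theta)) / eps  # forward difference
        assert np.isclose(fd, grad_f(theta)[i], atol=1e-4)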
Example #4
    def test_predict_output(self):
        # Random training data; same shape checks as Example #1
        x = np.random.random((10, 3))
        y = np.random.random(10)

        kriging = MGP(n_comp=2)
        kriging.set_training_values(x, y)
        kriging.train()

        x_fail_1 = np.asarray([0, 0, 0, 0])
        x_fail_2 = np.asarray([0])

        self.assertRaises(ValueError, lambda: kriging.predict_values(x_fail_1))
        self.assertRaises(ValueError, lambda: kriging.predict_values(x_fail_2))

        self.assertRaises(ValueError,
                          lambda: kriging.predict_variances(x_fail_1))
        self.assertRaises(ValueError,
                          lambda: kriging.predict_variances(x_fail_2))

        x_1 = np.atleast_2d([0, 0, 0])

        var = kriging.predict_variances(x_1)
        var_1 = kriging.predict_variances(x_1, True)
        self.assertEqual(var, var_1[0])
Example #5
    def test_mgp(self):
        import numpy as np
        import matplotlib.pyplot as plt
        from smt.surrogate_models import MGP
        from smt.sampling_methods import LHS

        # Construction of the DOE
        dim = 3

        def fun(x):
            s = np.sum(x, axis=1)
            return s**2 - s + 0.2 * (s * 1.2) ** 3

        sampling = LHS(xlimits=np.asarray([(-1, 1)] * dim), criterion="m")
        xt = sampling(8)
        yt = np.atleast_2d(fun(xt)).T

        # Build the MGP model
        sm = MGP(
            theta0=[1e-2],
            print_prediction=False,
            n_comp=1,
        )
        sm.set_training_values(xt, yt[:, 0])
        sm.train()

        # Get the transfer matrix A
        emb = sm.embedding["C"]

        # Compute the smallest box containing all points of A
        upper = np.sum(np.abs(emb), axis=0)
        lower = -upper

        # Test the model
        # 1-D grid over the active subspace (n_comp=1, so lower/upper have one entry)
        u_plot = np.atleast_2d(np.arange(lower[0], upper[0], 0.01)).T
        x_plot = sm.get_x_from_u(u_plot)  # Get corresponding points in Omega
        y_plot_true = fun(x_plot)
        y_plot_pred = sm.predict_values(u_plot)
        sigma_MGP, sigma_KRG = sm.predict_variances(u_plot, True)

        u_train = sm.get_u_from_x(xt)  # Get corresponding points in A

        # Plots
        fig, ax = plt.subplots()
        ax.plot(u_plot, y_plot_pred, label="Predicted")
        ax.plot(u_plot, y_plot_true, "k--", label="True")
        ax.plot(u_train, yt, "k+", mew=3, ms=10, label="Train")
        ax.fill_between(
            np.ravel(u_plot),
            np.ravel(y_plot_pred - 3 * sigma_MGP),
            np.ravel(y_plot_pred + 3 * sigma_MGP),
            color="r",
            alpha=0.5,
            label="Variance with hyperparameter uncertainty",
        )
        ax.fill_between(
            np.ravel(u_plot),
            np.ravel(y_plot_pred - 3 * sigma_KRG),
            np.ravel(y_plot_pred + 3 * sigma_KRG),
            color="b",
            alpha=0.5,
            label="Variance without hyperparameter uncertainty",
        )

        ax.set(xlabel="x", ylabel="y", title="MGP")
        fig.legend(loc="upper center", ncol=2)
        fig.tight_layout()
        fig.subplots_adjust(top=0.74)
        plt.show()
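A numerical sanity check can complement the plot. The lines below are a sketch that continues the example (they reuse y_plot_true and y_plot_pred, so they are not standalone):

        # Rough fit check on the plotting grid
        rmse = np.sqrt(np.mean((y_plot_true - y_plot_pred[:, 0]) ** 2))
        print("RMSE on the plotting grid: %.4g" % rmse)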