from smt.surrogate_models import KPLSK
from smt.utils.misc import compute_rms_error  # importable from smt.utils in older SMT releases


def kPLSK(xt, yt, xtest, ytest):
    ########### The KPLSK model
    # 'n_comp' must be an integer in [1, ndim) and 'theta0' a list of length n_comp.

    t = KPLSK(n_comp=2, theta0=[1e-2, 1e-2], print_prediction=False)
    t.set_training_values(xt, yt)
    t.train()

    print('KPLSK,  err: ' + str(compute_rms_error(t, xtest, ytest)))
    title = 'KPLSK model: validation of the prediction model'
    return t, title, xtest, ytest
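
A short usage sketch for the helper above, assuming a hypothetical 3-variable test function and uniform random sampling; the function, sample sizes, and seed below are illustrative, not from the original tutorial:

import numpy as np

rng = np.random.default_rng(0)
ndim = 3

def f(x):
    # hypothetical smooth test function (illustrative only)
    return np.sin(x[:, 0]) + 0.5 * x[:, 1] ** 2 + 0.1 * x[:, 2]

xt = rng.uniform(-1.0, 1.0, size=(50, ndim))
xtest = rng.uniform(-1.0, 1.0, size=(200, ndim))
yt = f(xt).reshape(-1, 1)        # column vectors, matching predict_values output
ytest = f(xtest).reshape(-1, 1)

t, title, xtest, ytest = kPLSK(xt, yt, xtest, ytest)
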
    def test_kplsk(self):
        import numpy as np
        import matplotlib.pyplot as plt

        from smt.surrogate_models import KPLSK

        xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
        yt = np.array([0.0, 1.0, 1.5, 0.9, 1.0])

        sm = KPLSK(theta0=[1e-2])
        sm.set_training_values(xt, yt)
        sm.train()

        num = 100
        x = np.linspace(0.0, 4.0, num)
        y = sm.predict_values(x)
        # estimated variance
        s2 = sm.predict_variances(x)
        # derivative with respect to the first variable, evaluated at the training points
        dydx = sm.predict_derivatives(xt, 0)

        plt.plot(xt, yt, "o")
        plt.plot(x, y)
        plt.xlabel("x")
        plt.ylabel("y")
        plt.legend(["Training data", "Prediction"])
        plt.show()

        # add a plot with variance
        plt.plot(xt, yt, "o")
        plt.plot(x, y)
        plt.fill_between(
            np.ravel(x),
            np.ravel(y - 3 * np.sqrt(s2)),
            np.ravel(y + 3 * np.sqrt(s2)),
            color="lightgrey",
        )
        plt.xlabel("x")
        plt.ylabel("y")
        plt.legend(["Training data", "Prediction", "Confidence Interval 99%"])
        plt.show()
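
As a sanity check on the derivative prediction above, the following sketch reuses the same training data and compares predict_derivatives against a central finite difference of predict_values; the check points and step size are illustrative assumptions.

import numpy as np

from smt.surrogate_models import KPLSK

xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
yt = np.array([0.0, 1.0, 1.5, 0.9, 1.0])

sm = KPLSK(theta0=[1e-2])
sm.set_training_values(xt, yt)
sm.train()

# points where the derivative is checked and the finite-difference step (illustrative choices)
x_check = np.linspace(0.5, 3.5, 7).reshape(-1, 1)
h = 1e-4

d_model = sm.predict_derivatives(x_check, 0)
d_fd = (sm.predict_values(x_check + h) - sm.predict_values(x_check - h)) / (2.0 * h)
print("max |model - finite difference| =", np.max(np.abs(d_model - d_fd)))
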
    def test_kplsk(self):
        import numpy as np
        import matplotlib.pyplot as plt

        from smt.surrogate_models import KPLSK

        xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
        yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

        sm = KPLSK(theta0=[1e-2])
        sm.set_training_values(xt, yt)
        sm.train()

        num = 100
        x = np.linspace(0.0, 4.0, num)
        y = sm.predict_values(x)
        # derivative with respect to the first variable, evaluated at the training points
        dydx = sm.predict_derivatives(xt, 0)
        plt.plot(xt, yt, "o")
        plt.plot(x, y)
        plt.xlabel("x")
        plt.ylabel("y")
        plt.legend(["Training data", "Prediction"])
        plt.show()
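
KPLSK uses a KPLS fit to initialize the hyperparameters of a standard Kriging model expressed back in the original space, so on a 1-D case like this its prediction should stay close to a plain KRG fit. A minimal comparison sketch (KRG is also provided by smt.surrogate_models):

import numpy as np
import matplotlib.pyplot as plt

from smt.surrogate_models import KPLSK, KRG

xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

x = np.linspace(0.0, 4.0, 100)

sm_kplsk = KPLSK(theta0=[1e-2])
sm_kplsk.set_training_values(xt, yt)
sm_kplsk.train()

sm_krg = KRG(theta0=[1e-2])
sm_krg.set_training_values(xt, yt)
sm_krg.train()

plt.plot(xt, yt, "o")
plt.plot(x, sm_kplsk.predict_values(x))
plt.plot(x, sm_krg.predict_values(x), "--")
plt.xlabel("x")
plt.ylabel("y")
plt.legend(["Training data", "KPLSK", "KRG"])
plt.show()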