# Example no. 1
class InterceptorSurrogateQoI(QuantityOfInterest):
    """
    Surrogate model of the dymos supersonic interceptor problem for analysis.

    Builds a quadratic or Kriging surrogate (via smt) from precomputed
    samples/function values loaded from an `.npz` file, and exposes QoI
    evaluation and gradient methods on top of it.
    """
    def __init__(self, systemsize, input_dict, data_type=float):
        # NOTE: the default used to be np.float, which NumPy deprecated in
        # 1.20 and removed in 1.24. np.float was an alias of the builtin
        # float, so this change is behavior-identical and crash-free on
        # modern NumPy.
        QuantityOfInterest.__init__(self, systemsize, data_type=data_type)

        # Load the precomputed training data (input samples + function values)
        fname = input_dict['surrogate info full path']
        surrogate_info = np.load(fname)
        surrogate_samples = surrogate_info['input_samples']
        fval_arr = surrogate_info['fvals']

        # Create the surrogate
        self.surrogate_type = input_dict['surrogate_type']
        if self.surrogate_type == 'quadratic':
            self.surrogate = QP()
        elif self.surrogate_type == 'kriging':
            theta0 = input_dict['kriging_theta']
            self.surrogate = KRG(theta0=[theta0],
                                 corr=input_dict['correlation function'])
        else:
            raise NotImplementedError(
                "Unsupported surrogate_type: {!r}".format(self.surrogate_type))
        # smt expects training inputs as (n_points, n_dims); the samples are
        # stored transposed, hence the .T
        self.surrogate.set_training_values(surrogate_samples.T, fval_arr)
        self.surrogate.train()

    def eval_QoI(self, mu, xi):
        """Evaluate the surrogate at the realization mu + xi.

        mu : mean of the random variables; xi : perturbation. Returns the
        smt prediction for the single point (shape follows smt's
        predict_values output).
        """
        rv = mu + xi
        return self.surrogate.predict_values(np.expand_dims(rv, axis=0))

    def eval_QoIGradient(self, mu, xi):
        """Gradient of the surrogate w.r.t. each random variable at mu + xi.

        Uses smt's analytic predict_derivatives per input dimension; returns
        a 1-D array of length self.systemsize.
        """
        rv = np.expand_dims(mu + xi, axis=0)
        dfdrv = np.zeros(self.systemsize, dtype=self.data_type)
        for i in range(self.systemsize):
            dfdrv[i] = self.surrogate.predict_derivatives(rv, i)[0, 0]

        return dfdrv

    def eval_QoIGradient_fd(self, mu, xi):
        """Finite-difference gradient via numdifftools. Debugging only."""
        def func(xi):
            return self.eval_QoI(mu, xi)

        G = nd.Gradient(func)(xi)
        return G

    def test_krg(self):
        """Sanity-check smt's KRG on a 1-D toy data set and plot the fit.

        Mirrors the smt documentation example; opens a matplotlib window.
        """
        import numpy as np
        import matplotlib.pyplot as plt

        from smt.surrogate_models import KRG

        xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
        yt = np.array([0.0, 1.0, 1.5, 0.9, 1.0])

        sm = KRG(theta0=[1e-2])
        sm.set_training_values(xt, yt)
        sm.train()

        num = 100
        x = np.linspace(0.0, 4.0, num)
        y = sm.predict_values(x)
        # estimated variance
        s2 = sm.predict_variances(x)
        # derivative according to the first variable, evaluated at the
        # training points (as in the smt docs example); result is unused
        # beyond exercising the API
        dydx = sm.predict_derivatives(xt, 0)
        fig, axs = plt.subplots(2)

        axs[0].plot(xt, yt, "o")
        axs[0].plot(x, y)
        axs[0].set_xlabel("x")
        axs[0].set_ylabel("y")
        axs[0].legend(["Training data", "Prediction"])

        # add a plot with variance (3-sigma band around the prediction)
        axs[1].plot(xt, yt, "o")
        axs[1].plot(x, y)
        axs[1].fill_between(
            np.ravel(x),
            np.ravel(y - 3 * np.sqrt(s2)),
            np.ravel(y + 3 * np.sqrt(s2)),
            color="lightgrey",
        )
        axs[1].set_xlabel("x")
        axs[1].set_ylabel("y")
        axs[1].legend(
            ["Training data", "Prediction", "Confidence Interval 99%"])

        plt.show()