Example #1
0
    def _initialize(self):
        """API function: set default values for user options"""
        declare = self.options.declare

        # One row per user option: (name, default, accepted types, description)
        option_specs = (
            ("alpha", 0.5, (int, float), "optimizer learning rate"),
            ("beta1", 0.9, (int, float), "Adam optimizer tuning parameter"),
            ("beta2", 0.99, (int, float), "Adam optimizer tuning parameter"),
            ("lambd", 0.1, (int, float), "regularization coefficient"),
            ("gamma", 1.0, (int, float), "gradient-enhancement coefficient"),
            ("deep", 2, int, "number of hidden layers"),
            ("wide", 2, int, "number of nodes per hidden layer"),
            ("mini_batch_size", 64, int, "split data into batches of specified size"),
            ("num_epochs", 10, int, "number of random passes through the data"),
            ("num_iterations", 100, int, "number of optimizer iterations per mini-batch"),
            ("seed", None, int, "random seed to ensure repeatability of results when desired"),
            ("is_print", True, bool, "print progress (or not)"),
        )
        for name, default, accepted, description in option_specs:
            declare(name, default, types=accepted, desc=description)

        # Capabilities advertised to the SMT API
        self.supports["derivatives"] = True
        self.supports["training_derivatives"] = True
        self.name = "GENN"

        # Placeholder network; presumably replaced during training
        self.model = Model()

        self._is_trained = False
Example #2
0
    def _initialize(self):
        """API function: set default values for user options"""
        declare = self.options.declare

        # Real-valued optimizer hyperparameters
        for name, default, description in (
            ('alpha', 0.5, 'optimizer learning rate'),
            ('beta1', 0.9, 'Adam optimizer tuning parameter'),
            ('beta2', 0.99, 'Adam optimizer tuning parameter'),
            ('lambd', 0.1, 'regularization coefficient'),
            ('gamma', 1.0, 'gradient-enhancement coefficient'),
        ):
            declare(name, default, types=(int, float), desc=description)

        # Integer-valued architecture and training-loop settings
        for name, default, description in (
            ('deep', 2, 'number of hidden layers'),
            ('wide', 2, 'number of nodes per hidden layer'),
            ('mini_batch_size', 64, 'split data into batches of specified size'),
            ('num_epochs', 10, 'number of random passes through the data'),
            ('num_iterations', 100, 'number of optimizer iterations per mini-batch'),
            ('seed', None, 'random seed to ensure repeatability of results when desired'),
        ):
            declare(name, default, types=int, desc=description)

        declare('is_print', True, types=bool, desc='print progress (or not)')

        # Capabilities advertised to the SMT API
        self.supports['derivatives'] = True
        self.supports['training_derivatives'] = True
        self.name = 'GENN'

        # Placeholder network; presumably replaced during training
        self.model = Model()

        self._is_trained = False
Example #3
0
    def _train(self):
        """
        API function: train the neural net

        Converts the stored SMT training data into the format expected by
        the neural net module, reads hyperparameters from ``self.options``,
        then initializes and trains ``self.model``. Sets ``self._is_trained``
        to True when training completes.

        Side effect: if no training derivatives were provided, the "gamma"
        option is forced to 0.0 (gradient enhancement disabled).
        """
        # Convert training data to format expected by neural net module
        X, Y, J = smt_to_genn(self.training_points)

        # If there are no training derivatives, turn off gradient-enhancement.
        # isinstance() is the idiomatic type check and, unlike the exact
        # type(J) == np.ndarray comparison, also covers ndarray subclasses.
        if isinstance(J, np.ndarray) and J.size == 0:
            self.options["gamma"] = 0.0

        # Get hyperparameters from SMT API
        alpha = self.options["alpha"]
        beta1 = self.options["beta1"]
        beta2 = self.options["beta2"]
        lambd = self.options["lambd"]
        gamma = self.options["gamma"]
        deep = self.options["deep"]
        wide = self.options["wide"]
        mini_batch_size = self.options["mini_batch_size"]
        num_iterations = self.options["num_iterations"]
        num_epochs = self.options["num_epochs"]
        seed = self.options["seed"]
        is_print = self.options["is_print"]

        # Number of inputs and outputs (X and Y hold one variable per row)
        n_x = X.shape[0]
        n_y = Y.shape[0]

        # Train neural net
        self.model = Model.initialize(n_x, n_y, deep, wide)
        self.model.train(
            X=X,
            Y=Y,
            J=J,
            num_iterations=num_iterations,
            mini_batch_size=mini_batch_size,
            num_epochs=num_epochs,
            alpha=alpha,
            beta1=beta1,
            beta2=beta2,
            lambd=lambd,
            gamma=gamma,
            seed=seed,
            silent=not is_print,
        )

        self._is_trained = True
Example #4
0
class GENN(SurrogateModel):
    def _initialize(self):
        """API function: set default values for user options"""
        declare = self.options.declare

        # Real-valued optimizer hyperparameters
        for name, default, description in (
            ("alpha", 0.5, "optimizer learning rate"),
            ("beta1", 0.9, "Adam optimizer tuning parameter"),
            ("beta2", 0.99, "Adam optimizer tuning parameter"),
            ("lambd", 0.1, "regularization coefficient"),
            ("gamma", 1.0, "gradient-enhancement coefficient"),
        ):
            declare(name, default, types=(int, float), desc=description)

        # Integer-valued architecture and training-loop settings
        for name, default, description in (
            ("deep", 2, "number of hidden layers"),
            ("wide", 2, "number of nodes per hidden layer"),
            ("mini_batch_size", 64, "split data into batches of specified size"),
            ("num_epochs", 10, "number of random passes through the data"),
            ("num_iterations", 100, "number of optimizer iterations per mini-batch"),
            ("seed", None, "random seed to ensure repeatability of results when desired"),
        ):
            declare(name, default, types=int, desc=description)

        declare("is_print", True, types=bool, desc="print progress (or not)")

        # Capabilities advertised to the SMT API
        self.supports["derivatives"] = True
        self.supports["training_derivatives"] = True
        self.name = "GENN"

        # Placeholder network; replaced when _train() runs
        self.model = Model()

        self._is_trained = False

    def _train(self):
        """
        API function: train the neural net

        Converts the stored SMT training data into the format expected by
        the neural net module, reads hyperparameters from ``self.options``,
        then initializes and trains ``self.model``. Sets ``self._is_trained``
        to True when training completes.

        Side effect: if no training derivatives were provided, the "gamma"
        option is forced to 0.0 (gradient enhancement disabled).
        """
        # Convert training data to format expected by neural net module
        X, Y, J = smt_to_genn(self.training_points)

        # If there are no training derivatives, turn off gradient-enhancement.
        # isinstance() is the idiomatic type check and, unlike the exact
        # type(J) == np.ndarray comparison, also covers ndarray subclasses.
        if isinstance(J, np.ndarray) and J.size == 0:
            self.options["gamma"] = 0.0

        # Get hyperparameters from SMT API
        alpha = self.options["alpha"]
        beta1 = self.options["beta1"]
        beta2 = self.options["beta2"]
        lambd = self.options["lambd"]
        gamma = self.options["gamma"]
        deep = self.options["deep"]
        wide = self.options["wide"]
        mini_batch_size = self.options["mini_batch_size"]
        num_iterations = self.options["num_iterations"]
        num_epochs = self.options["num_epochs"]
        seed = self.options["seed"]
        is_print = self.options["is_print"]

        # Number of inputs and outputs (X and Y hold one variable per row)
        n_x = X.shape[0]
        n_y = Y.shape[0]

        # Train neural net
        self.model = Model.initialize(n_x, n_y, deep, wide)
        self.model.train(
            X=X,
            Y=Y,
            J=J,
            num_iterations=num_iterations,
            mini_batch_size=mini_batch_size,
            num_epochs=num_epochs,
            alpha=alpha,
            beta1=beta1,
            beta2=beta2,
            lambd=lambd,
            gamma=gamma,
            seed=seed,
            silent=not is_print,
        )

        self._is_trained = True

    def _predict_values(self, x):
        """
        API method: predict values using appropriate methods from the neural_network.py module

        :param  x: np.ndarray[n, nx] -- Input values for the prediction points
        :return y: np.ndarray[n, ny] -- Output values at the prediction points
        """
        # The neural net consumes transposed (column-per-point) arrays, so
        # transpose the inputs going in and the predictions coming back out.
        predictions = self.model.evaluate(x.T)
        return predictions.T

    def _predict_derivatives(self, x, kx):
        """
        API method: predict partials using appropriate methods from the neural_network.py module

        :param  x: np.ndarray[n, nx] -- Input values for the prediction points
        :param kx: int -- The 0-based index of the input variable with respect to which derivatives are desired
        :return: dy_dx: np.ndarray[n, ny] -- partial derivatives
        """
        # Get the full gradient from the net, slice out the kx-th input
        # variable along axis 1, then transpose back to the SMT layout.
        jacobian = self.model.gradient(x.T)
        return jacobian[:, kx, :].T

    def plot_training_history(self):
        """Plot the convergence history of the most recent training run.

        Does nothing if the model has not been trained yet.
        """
        if not self._is_trained:
            return
        self.model.plot_training_history()

    def goodness_of_fit(self, xv, yv, dyv_dxv):
        """
        Compute metrics to evaluate goodness of fit and show actual by predicted plot

        :param xv: np.ndarray[n, nx], x validation points
        :param yv: np.ndarray[n, 1], y validation response
        :param dyv_dxv: np.ndarray[n, ny], dydx validation derivatives
        """
        # Store current training points so they can be restored afterwards
        training_points = self.training_points

        # Temporarily swap in the validation set. The restore happens in a
        # finally block so that an exception raised while loading, converting,
        # or plotting cannot leave the model permanently pointing at the
        # validation data instead of its real training data.
        try:
            # Replace training points with test (validation) points
            load_smt_data(self, xv, yv, dyv_dxv)

            # Convert from SMT format to a more convenient format for GENN
            X, Y, J = smt_to_genn(self.training_points)

            # Generate goodness of fit plots
            self.model.goodness_of_fit(X, Y)
        finally:
            # Restore training points
            self.training_points = training_points