# Example 1
 def activation(self, new_activation):
     """Set the activation function.

     Any string is stored as-is; the special value 'leaky_relu' is
     replaced by a LeakyReLU layer object, since Keras has no string
     alias for it.
     """
     if new_activation != 'leaky_relu':
         self._activation = new_activation
         return
     # Masquerade the layer as 'relu' so downstream name-based logic
     # keeps working.
     leaky = LeakyReLU(alpha=0.001)
     leaky.__name__ = 'relu'
     self._activation = leaky
# Example 2
    def __init__(self, X, Y, *dictionary):
        """Store the training data and read the network settings.

        Parameters:
            X, Y: input and target data for the model.
            *dictionary: optional; when given, dictionary[0] is a settings
                dict whose mandatory keys override the defaults below.

        Raises:
            Exception: if any mandatory key is missing from the settings.
        """
        self.X = X
        self.Y = Y

        # Defaults, overridden by the settings dictionary when provided.
        self._activation = 'relu'
        self._batch_size = 64
        self._n_epochs = 1000
        self._getNeurons = [1, 1]
        self._dropout = 0
        self._patience = 10
        self._batchNormalization = False
        self._alpha = 0.0001

        self.save_txt = True

        if dictionary:
            settings = dictionary[0]

            def required(key, message):
                # Mandatory lookup: a missing key raises a descriptive
                # Exception instead of a bare KeyError. (The original bare
                # `except:` and the unreachable exit() after each raise are
                # removed.)
                try:
                    return settings[key]
                except KeyError:
                    raise Exception(message) from None

            self._center = required(
                "center", "centering decision not given to the dictionary!")
            self._centering = required(
                "centering_method",
                "centering criterion not given to the dictionary!")
            self._scale = required(
                "scale", "scaling decision not given to the dictionary!")
            self._scaling = required(
                "scaling_method",
                "scaling criterion not given to the dictionary!")
            self._activation = required(
                "activation_function",
                "activation function not given to the dictionary!")
            self._batch_size = required(
                "batch_size", "batch size not given to the dictionary!")
            self._n_epochs = required(
                "number_of_epochs",
                "number of epochs not given to the dictionary!")
            self._getNeurons = required(
                "neurons_per_layer",
                "number of neurons not given to the dictionary!")
            self._dropout = required(
                "dropout", "dropout not given to the dictionary!")
            self._patience = required(
                "patience",
                "patience for early stopping not given to the dictionary!")
            self._batchNormalization = required(
                "batchNormalization",
                "batch normalization not given to the dictionary!")
            self._alpha = required(
                "alpha_LR",
                "alpha for leaky relu not given to the dictionary!")

            if settings["activation_function"] == 'leaky_relu':
                # Keras has no string id for LeakyReLU: wrap the layer and
                # fake the name so name-based checks downstream still work.
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR
# Example 3
    def __init__(self, X, Y, *dictionary):
        """Store the data, delegate to the base class, then read settings.

        Parameters:
            X, Y: input and target data for the model.
            *dictionary: optional; dictionary[0] is the mandatory-settings
                dict, dictionary[1] (if present) is the test set Xtest.

        Raises:
            Exception: if any mandatory key is missing from the settings.
        """
        self.X = X
        self.Y = Y
        super().__init__(self.X, self.Y, *dictionary)

        if dictionary:
            settings = dictionary[0]

            def required(key, message):
                # Mandatory lookup: a missing key raises a descriptive
                # Exception instead of a bare KeyError. (The original bare
                # `except:` and the unreachable exit() after each raise are
                # removed.)
                try:
                    return settings[key]
                except KeyError:
                    raise Exception(message) from None

            self._center = required(
                "center", "centering decision not given to the dictionary!")
            self._centering = required(
                "centering_method",
                "centering criterion not given to the dictionary!")
            self._scale = required(
                "scale", "scaling decision not given to the dictionary!")
            self._scaling = required(
                "scaling_method",
                "scaling criterion not given to the dictionary!")
            self._activation = required(
                "activation_function",
                "activation function not given to the dictionary!")
            self._batch_size = required(
                "batch_size", "batch size not given to the dictionary!")
            self._n_epochs = required(
                "number_of_epochs",
                "number of epochs not given to the dictionary!")
            self._getNeurons = required(
                "neurons_per_layer",
                "number of neurons not given to the dictionary!")
            self._dropout = required(
                "dropout", "dropout not given to the dictionary!")
            self._patience = required(
                "patience",
                "patience for early stopping not given to the dictionary!")
            self._alpha = required(
                "alpha_LR",
                "alpha for leaky relu not given to the dictionary!")

            if settings["activation_function"] == 'leaky_relu':
                # Keras has no string id for LeakyReLU: wrap the layer and
                # fake the name so name-based checks downstream still work.
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR

            # The test set is optional: only an absent second positional
            # argument is tolerated (IndexError), not arbitrary failures.
            try:
                self.Xtest = dictionary[1]
            except IndexError:
                print("Test not given!!")
# Example 4
    def __init__(self, X, Y, *dictionary):
        """Store the data, set regression defaults, then read settings.

        Parameters:
            X, Y: input and target data for the model.
            *dictionary: optional; dictionary[0] is the mandatory-settings
                dict, dictionary[1] (if present) is the extra data Z and
                enables the test process.

        Raises:
            Exception: if any mandatory key is missing from the settings.
        """
        self.X = X
        self.Y = Y

        # Defaults for the output layer / training loop, overridden below.
        self._activation_output = 'linear'
        self._loss_function = 'mean_squared_error'
        self._monitor_early_stop = 'mean_squared_error'
        self._learningRate = 0.0001

        self.testProcess = False

        super().__init__(self.X, self.Y, *dictionary)

        if dictionary:
            settings = dictionary[0]

            def required(key, message):
                # Mandatory lookup: a missing key raises a descriptive
                # Exception instead of a bare KeyError. (The original bare
                # `except:` and the unreachable exit() after each raise are
                # removed.)
                try:
                    return settings[key]
                except KeyError:
                    raise Exception(message) from None

            self._center = required(
                "center", "centering decision not given to the dictionary!")
            self._centering = required(
                "centering_method",
                "centering criterion not given to the dictionary!")
            self._scale = required(
                "scale", "scaling decision not given to the dictionary!")
            self._scaling = required(
                "scaling_method",
                "scaling criterion not given to the dictionary!")
            self._activation = required(
                "activation_function",
                "activation function not given to the dictionary!")
            self._batch_size = required(
                "batch_size", "batch size not given to the dictionary!")
            self._n_epochs = required(
                "number_of_epochs",
                "number of epochs not given to the dictionary!")
            self._getNeurons = required(
                "neurons_per_layer",
                "number of neurons per layer not given to the dictionary!")
            self._dropout = required(
                "dropout", "dropout not given to the dictionary!")
            self._patience = required(
                "patience",
                "patience for early stopping not given to the dictionary!")
            # Fixed: the original reported "centering decision not given"
            # for a missing alpha_LR; use the alpha message the sibling
            # classes use.
            self._alpha = required(
                "alpha_LR",
                "alpha for leaky relu not given to the dictionary!")
            self._activation_output = required(
                "activation_output",
                "activation output layer not given to the dictionary!")
            self._loss_function = required(
                "loss_function", "loss function not given to the dictionary!")
            self._monitor_early_stop = required(
                "monitor",
                "monitor for early stopping not given to the dictionary!")
            self._learningRate = required(
                "learning_rate",
                "initial learning rate not given to the dictionary!")
            self._batchNormalization = required(
                "batchNormalization",
                "batch normalization not given to the dictionary!")

            if settings["activation_function"] == 'leaky_relu':
                # Keras has no string id for LeakyReLU: wrap the layer and
                # fake the name so name-based checks downstream still work.
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR

        # A second positional argument supplies the extra data Z and
        # switches the instance into test mode.
        if len(dictionary) > 1:
            self.Z = dictionary[1]
            self.testProcess = True