Example #1
    def activation(self, new_activation):
        if new_activation == 'leaky_relu':
            # Keras cannot serialize a bare layer instance used as an
            # activation unless it carries a __name__, so one is set manually
            LR = LeakyReLU(alpha=self._alpha)
            LR.__name__ = 'relu'
            self._activation = LR
        else:
            self._activation = new_activation
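
A minimal sketch of why the renaming matters (assuming an older standalone Keras, where the saved model config stores activation.__name__): the renamed instance can then be passed anywhere a named activation is accepted.

from keras.layers import Dense, LeakyReLU

leaky = LeakyReLU(alpha=0.0001)
leaky.__name__ = 'relu'  # older Keras reads __name__ when writing the model config
hidden = Dense(32, activation=leaky)  # the callable layer instance acts as the activation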
Example #2
    def __init__(self, X, Y, *dictionary):
        self.X = X
        self.Y = Y

        self._activation = 'relu'
        self._batch_size = 64
        self._n_epochs = 1000
        self._getNeurons = [1, 1]
        self._dropout = 0
        self._patience = 10
        self._batchNormalization = False
        self._alpha = 0.0001

        self.save_txt = True

        if dictionary:
            settings = dictionary[0]

            self._center = settings["center"]
            self._centering = settings["centering_method"]
            self._scale = settings["scale"]
            self._scaling = settings["scaling_method"]
            self._activation = settings["activation_function"]
            self._batch_size = settings["batch_size"]
            self._n_epochs = settings["number_of_epochs"]
            self._getNeurons = settings["neurons_per_layer"]
            self._dropout = settings["dropout"]
            self._patience = settings["patience"]
            self._batchNormalization = settings["batchNormalization"]
            self._alpha = settings["alpha_LR"]

            if settings["activation_function"] == 'leaky_relu':
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR
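
For reference, a hypothetical settings dictionary covering the keys read above might look like the following (key names come from the snippet; the class name and values are purely illustrative):

settings = {
    "center": True,
    "centering_method": "mean",
    "scale": True,
    "scaling_method": "auto",
    "activation_function": "leaky_relu",
    "batch_size": 64,
    "number_of_epochs": 1000,
    "neurons_per_layer": [256, 128],
    "dropout": 0.2,
    "patience": 10,
    "batchNormalization": False,
    "alpha_LR": 0.0001,
}
net = RegressionNetwork(X, Y, settings)  # hypothetical class name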
Example #3
    def __init__(self, X, Y, *dictionary):
        self.X = X
        self.Y = Y
        super().__init__(self.X, self.Y, *dictionary)

        if dictionary:
            settings = dictionary[0]

            self._center = settings["center"]
            self._centering = settings["centering_method"]
            self._scale = settings["scale"]
            self._scaling = settings["scaling_method"]
            self._activation = settings["activation_function"]
            self._batch_size = settings["batch_size"]
            self._n_epochs = settings["number_of_epochs"]
            self._getNeurons = settings["neurons_per_layer"]
            self._dropout = settings["dropout"]
            self._patience = settings["patience"]

            if settings["activation_function"] == 'leaky_relu':
                # self._alpha is inherited from the parent __init__
                # (its default, or settings["alpha_LR"] if provided there)
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR
Example #4
    def __init__(self, X, Y, *dictionary):
        self.X = X
        self.Y = Y
        self._activation_output = 'linear'
        self._loss_function = 'mean_squared_error'
        self._monitor_early_stop = 'mean_squared_error'
        self._learningRate = 0.0001

        super().__init__(self.X, self.Y, *dictionary)

        if dictionary:
            settings = dictionary[0]

            self._center = settings["center"]
            self._centering = settings["centering_method"]
            self._scale = settings["scale"]
            self._scaling = settings["scaling_method"]
            self._activation = settings["activation_function"]
            self._batch_size = settings["batch_size"]
            self._n_epochs = settings["number_of_epochs"]
            self._getNeurons = settings["neurons_per_layer"]
            self._dropout = settings["dropout"]
            self._patience = settings["patience"]
            self._alpha = settings["alpha_LR"]
            self._activation_output = settings["activation_output"]
            self._loss_function = settings["loss_function"]
            self._monitor_early_stop = settings["monitor"]
            self._learningRate = settings["learning_rate"]

            if settings["activation_function"] == 'leaky_relu':
                LR = LeakyReLU(alpha=self._alpha)
                LR.__name__ = 'relu'
                self._activation = LR

        # a second positional argument after the settings dictionary
        # is interpreted as test data
        if len(dictionary) > 1:
            self.Z = dictionary[1]
            self.testProcess = True
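
A hedged usage sketch for the two-argument form above (the class name is hypothetical; the second positional argument becomes the test matrix Z):

net = Predictor(X, Y, settings, Z_test)  # len(dictionary) > 1, so testProcess is enabled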
Example #5
import glob
import numpy as np
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Conv2D, MaxPooling2D, Flatten, Input
from sklearn.model_selection import KFold, cross_val_score, cross_val_predict, train_test_split
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.metrics import accuracy_score, f1_score
from keras.models import load_model
from keras.layers import LeakyReLU
leaky = LeakyReLU(alpha=0.2)
leaky.__name__ = 'leaky'

#1. data
x_train = np.load('/tf/notebooks/Keum/data/x_train.npy').reshape(
    -1, 384, 384, 1)
x_pred = np.load('/tf/notebooks/Keum/data/x_test.npy').reshape(-1, 384, 384, 1)
x_val = np.load('/tf/notebooks/Keum/data/x_val.npy').reshape(-1, 384, 384, 1)

y_train = np.load('/tf/notebooks/Keum/data/y_train.npy')
y_val = np.load('/tf/notebooks/Keum/data/y_test.npy')

# load_model
model = load_model('/tf/notebooks/Keum/save_model/model02.h5')

# y_pred = model.predict(x_test)
# f1_score = f1_score(y_test, y_pred)
# print('f1_score : ', f1_score)

# y_predict = model.predict(x_pred)
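
One caveat worth noting, as an assumption about this workflow rather than something shown in the original: if model02.h5 was saved with the renamed LeakyReLU instance as an activation, load_model typically needs the same callable passed back through custom_objects.

model = load_model('/tf/notebooks/Keum/save_model/model02.h5',
                   custom_objects={'leaky': leaky})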
Example #6
        # print('wm:',w_batch.shape, w_batch.max(), w_batch.min())

        yield (c_batch, w_batch)


### layer / model
from keras.layers import Input, Conv2D, concatenate, Dense, Dropout, add, GlobalAveragePooling2D, \
    UpSampling2D, BatchNormalization, LeakyReLU, Activation, AveragePooling2D, MaxPooling2D, Reshape
from keras.models import Model
import keras.backend as K
from keras import optimizers
import tensorflow as tf

# default alpha; renamed so Keras can serialize the instance as an activation
LR = LeakyReLU()
LR.__name__ = 'relu'


def conv_block(x, scale, filters, prefix):

    # channel depth of the incoming tensor
    d = K.int_shape(x)[-1]

    # note: hard-coded here, overriding the filters argument
    filters = 32

    ### path #1
    p1 = Conv2D(int(filters * scale), kernel_size=(1, 1), strides=1, activation=LR, \
                padding='same', name=prefix + 'path1_1x1_conv')(x)

    ### path #2
    p2 = Conv2D(int(filters * scale), kernel_size=(1, 1), strides=1, activation=LR, \