Example 1
    def __init__(self,
                 w_initializer,
                 b_initializer,
                 v,
                 lmbda=0,
                 mask=None,
                 exclude=None):
        super(EqlLayer, self).__init__()
        if exclude is None:
            exclude = []
        self.regularizer = regularizers.L1(l1=lmbda)
        self.w_initializer = initializers.get(w_initializer)
        self.b_initializer = initializers.get(b_initializer)
        self.mask = mask
        self.v = v
        self.activations = [identity, sin, cos, sigmoid, mult]

        # Drop excluded activations; note that 'mult' contributes 2 to the
        # exclusion count, presumably because a multiplication unit takes
        # two inputs.
        self.exclusion = 0
        for name, fn, cost in [('id', identity, 1), ('sin', sin, 1),
                               ('cos', cos, 1), ('sig', sigmoid, 1),
                               ('mult', mult, 2)]:
            if name in exclude:
                self.exclusion += cost
                self.activations.remove(fn)
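Only the constructor appears above; the stored initializers and regularizer would normally be consumed in build(). Below is a minimal, hypothetical sketch of such a build() (the shape arithmetic and names are illustrative assumptions, not the original implementation; self.v is treated here as the number of units per activation):

    def build(self, input_shape):
        # Hypothetical: output width derived from self.v; not the original code.
        out_dim = self.v * len(self.activations)
        self.w = self.add_weight(name='w',
                                 shape=(input_shape[-1], out_dim),
                                 initializer=self.w_initializer,
                                 regularizer=self.regularizer,
                                 trainable=True)
        self.b = self.add_weight(name='b',
                                 shape=(out_dim,),
                                 initializer=self.b_initializer,
                                 trainable=True)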
Example 2
from tensorflow.keras import models, layers, activations, regularizers, optimizers, losses


def model_definition(input_shape):
    model = models.Sequential()

    model.add(layers.Dense(
                    units=10, 
                    input_shape=(input_shape,), 
                    use_bias=True,
                    #activation=activations.relu,
                    activity_regularizer=regularizers.L2(0.0)))  # 0.0 disables the penalty

    # model.add(layers.Dense(
    #                 units=3,
    #                 activity_regularizer=regularizers.L2(0.000),
    #                 #activation=activations.relu,
    #                 use_bias=True))

    model.add(layers.Dense(
                    units=1,
                    activity_regularizer=regularizers.L1(0.0),
                    #activation=activations.sigmoid,
                    use_bias=True))

    model.compile(optimizer=optimizers.SGD(0.001),
                  loss=losses.MSE,
                  metrics=['acc'])

    return model
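A quick smoke test of model_definition, using an arbitrary 8-feature input and random data (the numbers here are placeholders, not from the original):

import numpy as np

model = model_definition(input_shape=8)  # 8 is an arbitrary feature count
X = np.random.rand(32, 8)
y = np.random.rand(32, 1)
model.fit(X, y, epochs=1, verbose=0)
model.summary()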
Example 3
from tensorflow.keras import models, layers, regularizers


def initialize_model():
    """NN Model Architecture"""
    ### Model
    model = models.Sequential()

    ### Embedding Padded
    model.add(layers.Embedding(input_dim=5000, output_dim=100, mask_zero=True))

    ### Stacked LSTM layers
    model.add(layers.LSTM(units=100, activation='tanh', return_sequences=True))
    model.add(layers.LSTM(units=100, activation='tanh', return_sequences=True))
    model.add(layers.LSTM(units=50, activation='tanh'))
    model.add(layers.Dropout(0.2))  # change params
    model.add(
        layers.Dense(
            40, activation='relu',
            kernel_regularizer=regularizers.L1(0.01)))  # use regularizers
    model.add(layers.Dropout(0.2))  # change params
    model.add(
        layers.Dense(
            20, activation='relu',
            kernel_regularizer=regularizers.L1(0.01)))  # use regularizers
    model.add(layers.Dropout(0.2))  # change params
    model.add(
        layers.Dense(
            10, activation='relu',
            kernel_regularizer=regularizers.L1(0.01)))  # use regularizers
    model.add(layers.Dropout(0.2))  # change params

    ### Last layer (a single linear output, i.e. regression)
    model.add(layers.Dense(1, activation='linear'))

    ### Model compilation
    model.compile(loss='mse', optimizer='rmsprop', metrics=['mae'])

    return model
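The Embedding layer expects integer token ids in [0, 5000), with 0 reserved for padding because mask_zero=True. A hypothetical smoke test with random padded sequences:

import numpy as np

model = initialize_model()
X = np.random.randint(1, 5000, size=(16, 30))  # 16 sequences of length 30
X[:, 25:] = 0  # simulate zero padding at the tail
y = np.random.rand(16)
model.fit(X, y, epochs=1, verbose=0)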
Example 4
    def __init__(self, w_initializer, b_initializer, lmbda=0, mask=None):
        super(DenseLayer, self).__init__()
        self.regularizer = regularizers.L1(l1=lmbda)
        self.w_initializer = initializers.get(w_initializer)
        self.b_initializer = initializers.get(b_initializer)
        self.mask = mask
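Again only the constructor is shown. One plausible reading of the mask argument is weight pruning: a fixed 0/1 tensor multiplied onto the kernel at call time. A hypothetical sketch, assuming self.w and self.b were created in build() and tensorflow is imported as tf:

    def call(self, inputs):
        # Hypothetical pruning step; not the original implementation.
        w = self.w if self.mask is None else self.w * self.mask
        return tf.matmul(inputs, w) + self.b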
Example 5
t_train = X_train.values
t_test = X_test.values
t_y_train = y_train.values
t_y_test = y_test.values
# Normalize the data
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
t_train = scaler.fit_transform(t_train)
t_test = scaler.transform(t_test)
# Create the neural network model
import pandas as pd  # needed below for the loss-history DataFrame

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras import regularizers
model = Sequential()
model.add(
    Dense(74, activation='tanh', kernel_regularizer=regularizers.L1(0.001)))
model.add(
    Dense(55, activation='relu', kernel_regularizer=regularizers.L1(0.001)))
model.add(
    Dense(40, activation='relu', kernel_regularizer=regularizers.L1(0.001)))
model.add(
    Dense(30, activation='relu', kernel_regularizer=regularizers.L1(0.001)))
model.add(Dense(1, activation='relu'))
model.compile(optimizer='adam', loss='mse')
model.fit(x=t_train,
          y=t_y_train,
          validation_data=(t_test, t_y_test),
          batch_size=128,
          epochs=340)
loss = pd.DataFrame(model.history.history)
loss.plot()
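A short follow-up, assuming the same variables as above, to check the held-out error directly:

from sklearn.metrics import mean_squared_error

preds = model.predict(t_test)
print('test MSE:', mean_squared_error(t_y_test, preds))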
Example 6
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, Bidirectional, LSTM, GRU, Dropout, Dense
from tensorflow.keras import regularizers
from tensorflow.keras import utils as ku

predictors, label = input_sequences[:, :-1], input_sequences[:, -1]

label = ku.to_categorical(label, num_classes=total_words)
print('Data preparation done')
model = Sequential()
model.add(Embedding(total_words, 100, input_length=max_sequence_len - 1))
# model.add(Conv1D(100, 5, activation='relu')),
# model.add(GlobalAveragePooling1D()),
model.add(Bidirectional(LSTM(150, return_sequences=True)))
model.add(Dropout(0.2))
model.add(Bidirectional(GRU(100, return_sequences=True)))
model.add(LSTM(100))
model.add(
    Dense(total_words // 2,  # integer division: layer units must be an int
          activation='relu',
          kernel_regularizer=regularizers.L1(
              0.01)))  ## A Dense Layer including regularizers
model.add(Dense(total_words, activation='softmax'))
# Pick an optimizer
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
print(model.summary())
history = model.fit(predictors, label, epochs=10, verbose=2)
import matplotlib.pyplot as plt


def plot_graphs(history, string):
    plt.plot(history.history[string])
    plt.xlabel("Epochs")
    plt.ylabel(string)
    plt.show()
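The helper can then be called with the metric names recorded during training, matching the compile() call above:

plot_graphs(history, 'accuracy')
plot_graphs(history, 'loss')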