def iris_model(x_train, y_train, x_val, y_val, params):

    model = Sequential()
    model.add(
        Dense(params['first_neuron'],
              input_dim=x_train.shape[1],
              activation='relu'))

    model.add(Dropout(params['dropout']))
    model.add(Dense(y_train.shape[1], activation=params['last_activation']))

    # params['optimizer'] holds an optimizer class (e.g. Adam), and
    # lr_normalizer rescales the unitless 'lr' value to that optimizer's range
    model.compile(optimizer=params['optimizer'](
        lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['loss'],
                  metrics=['acc'])

    out = model.fit(x_train,
                    y_train,
                    batch_size=params['batch_size'],
                    epochs=params['epochs'],
                    verbose=0,
                    validation_data=(x_val, y_val),
                    callbacks=early_stopper(params['epochs'], mode='strict'))

    return out, model
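
# Usage sketch (not from the original source): a minimal way to drive a Talos
# model function such as iris_model. The imports, dataset, parameter values,
# and Scan arguments below are illustrative assumptions only.
import talos
from keras.optimizers import Adam, Nadam
from keras.utils import to_categorical
from sklearn.datasets import load_iris

data = load_iris()
x, y = data.data, to_categorical(data.target)

# keys must match exactly what iris_model reads from `params`
p = {'first_neuron': [16, 32, 64],
     'dropout': [0.0, 0.25],
     'optimizer': [Adam, Nadam],   # classes, not strings: iris_model calls them
     'lr': [0.5, 1.0, 2.0],        # unitless; rescaled by lr_normalizer
     'last_activation': ['softmax'],
     'loss': ['categorical_crossentropy'],
     'batch_size': [16, 32],
     'epochs': [100]}

scan = talos.Scan(x=x, y=y, params=p, model=iris_model,
                  experiment_name='iris')  # required in recent Talos versions
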
def nnet_model(X_train, y_train, X_val, y_val, params):

    model = models.Sequential()

    # initial layer
    model.add(layers.Dense(params['first_neuron'], input_dim=X_train.shape[1],
                           activation=params['activation'],
                           kernel_initializer=params['kernel_initializer']))
    model.add(layers.Dropout(params['dropout']))
    
    # hidden layers
    talos.utils.hidden_layers(model, params, y_train.shape[1])
    
    
    # final layer
    model.add(layers.Dense(y_train.shape[1], 
                    kernel_initializer=params['kernel_initializer']))
    
    if params['optimizer'] == "Adam":
        opt = keras.optimizers.Adam(lr=params['lr'], beta_1=0.9, beta_2=0.999)
    elif params['optimizer'] == "SGD":
        opt = keras.optimizers.SGD(lr=params['lr'], momentum=params['momentum'],
                                   nesterov=True)
    else:
        # avoid a NameError below when the grid holds an unexpected value
        raise ValueError("unsupported optimizer: %s" % params['optimizer'])

    model.compile(loss='mean_squared_error', optimizer=opt)
    
    history = model.fit(X_train, y_train, 
                        validation_data=(X_val, y_val),
                        batch_size=32,
                        epochs=1000,
                        verbose=1,
                        callbacks=[early_stopper(epochs=1000, 
                                                mode='strict', 
                                                monitor='val_loss')])
    return history, model
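
# Parameter-grid sketch (not from the original): talos.utils.hidden_layers
# builds the middle of the network from the same dictionary, so the grid must
# also carry the keys it reads. The exact key set ('shapes', 'hidden_layers',
# 'first_neuron', 'dropout', 'activation') is an assumption -- verify it
# against your Talos version.
p = {'first_neuron': [64, 128],
     'activation': ['relu'],
     'kernel_initializer': ['glorot_uniform'],
     'dropout': [0.0, 0.2],
     'shapes': ['brick', 'triangle'],  # layer-width strategy for hidden_layers
     'hidden_layers': [1, 2, 3],
     'optimizer': ['Adam', 'SGD'],
     'lr': [1e-3, 1e-2],
     'momentum': [0.9]}
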
def pet_finder_model(x_train, y_train, x_test, y_test, params):
    model = Sequential()

    model.add(
        Dense(params['num_Nodes'],
              input_dim=n,  # `n` (input feature count) is a module-level global in the original
              activation='sigmoid',
              kernel_initializer='random_uniform'))
    model.add(Dropout(params['dropout']))
    model.add(
        Dense(params['num_Nodes'],
              activation='sigmoid',
              kernel_initializer='random_uniform'))
    model.add(Dropout(params['dropout']))
    model.add(
        Dense(
            5,
            activation=params['final_activation'],
            kernel_initializer='random_uniform',
        ))

    model.compile(optimizer=Adam(lr=params['lr'],
                                 beta_1=0.9,
                                 beta_2=0.999,
                                 epsilon=1e-8),
                  loss=params['loss_function'],
                  metrics=['accuracy'])

    # Train. `epochs` is a module-level global in the original source.
    # Note: Keras expects class_weight to be a dict mapping class indices to
    # weights; the string 'Balanced' is not a supported value (see the sketch
    # after this function for one way to build such a dict).
    out = model.fit(x=x_train,
                    y=y_train,
                    epochs=epochs,
                    batch_size=32,
                    verbose=1,
                    class_weight='Balanced',
                    validation_split=0.2,
                    callbacks=[
                        early_stopper(epochs=1000,
                                      monitor='val_loss',
                                      mode='moderate'),
                        early_stopper(epochs=1000,
                                      monitor='val_accuracy',
                                      mode='moderate')
                    ])

    return out, model
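
# Sketch (not from the original): Keras has no 'Balanced' string mode for
# class_weight, so the dict has to be built by hand. This helper assumes
# one-hot targets with one column per class (as Dense(5) above suggests).
import numpy as np
from sklearn.utils.class_weight import compute_class_weight

def balanced_class_weights(y_onehot):
    """Build the class-index -> weight dict that Keras expects."""
    labels = np.argmax(y_onehot, axis=1)
    weights = compute_class_weight(class_weight='balanced',
                                   classes=np.unique(labels), y=labels)
    return dict(zip(np.unique(labels), weights))

# then: model.fit(..., class_weight=balanced_class_weights(y_train), ...)
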
def dae_model_hl(x_train, y_train, x_val, y_val, params):
    print(x_train.shape)
    print("masking training")
    x_train_noise = mask_function(dataframe=x_train,
                                  noise=float(params['noise']),
                                  batch_sizes=300)
    print("masking validation")
    x_val_noise = mask_function(dataframe=x_val,
                                noise=float(params['noise']),
                                batch_sizes=300)

    print("building autoencoder network")
    model = Sequential()
    model.add(
        Dense(params['first_neuron'],
              activation=params['activation'],
              input_shape=(x_train.shape[1], )))
    model.add(Dropout(params['dropout']))

    hidden_layers(model, params, 1)
    model.add(
        Dense(params['embedding_size'],
              activation=params['activation'],
              name="bottleneck"))
    hidden_layers(model, params, 1)
    model.add(Dense(params['first_neuron'], activation=params['activation']))
    model.add(Dropout(params['dropout']))

    model.add(Dense(x_train.shape[1], activation=params['last_activation']))
    model.compile(optimizer=params['optimizer'](
        lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['loss'],
                  metrics=['accuracy'])
    print("training neural network")
    out = model.fit(
        x_train,
        x_train_noise,  #x_train_noise,
        batch_size=params['batch_size'],
        epochs=params['epochs'],
        verbose=0,
        validation_data=[x_val, x_val_noise],  #x_val_noise],
        callbacks=early_stopper(params['epochs'], mode='moderate'))
    #callbacks=early_stopper(params['epochs'], mode='strict'))#noisy_train, train, batch_size=128, epochs=params['epochs'], verbose=1,
    return out, model
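
# mask_function is not shown in this snippet. A hypothetical sketch with the
# same call signature (the body is an assumption; the original may differ):
import numpy as np

def mask_function(dataframe, noise, batch_sizes=300):
    """Masking noise: zero out a `noise` fraction of the entries,
    processing the rows in chunks of `batch_sizes`."""
    values = np.asarray(dataframe, dtype=float).copy()
    for start in range(0, len(values), batch_sizes):
        block = values[start:start + batch_sizes]        # view into `values`
        block[np.random.rand(*block.shape) < noise] = 0.0
    return values
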
def fml_model(x_train, y_train, x_val, y_val, params):
    input_dim = x_train.shape[1]

    # Parameters
    batch_size = params[BATCH_SIZE]
    epochs = params[EPOCHS]
    activation = params[ACTIVATION]
    bias_initializer = params[BIAS_INITIALIZER]
    kernel_initializer = params[KERNEL_INITIALIZER]
    bias_regularizer = params[BIAS_REGULARIZER]
    hidden_layer = params[HIDDEN_LAYER]
    dropout_rate = params[DROPOUT_RATE]

    model = Sequential()
    model.add(
        Dense(hidden_layer[0],
              input_dim=input_dim,
              activation=activation,
              bias_initializer=bias_initializer,
              kernel_initializer=kernel_initializer,
              bias_regularizer=bias_regularizer))
    model.add(BatchNormalization())
    model.add(Dropout(dropout_rate))

    for units in hidden_layer[1:]:
        model.add(
            Dense(units,
                  activation=activation,
                  bias_initializer=bias_initializer,
                  kernel_initializer=kernel_initializer))
        model.add(BatchNormalization())
        model.add(Dropout(dropout_rate))

    model.add(Dense(1))
    model.compile(loss=keras.losses.mse, optimizer=keras.optimizers.Adam())
    out = model.fit(x_train,
                    y_train,
                    batch_size=batch_size,
                    epochs=epochs,
                    verbose=0,
                    validation_data=(x_val, y_val),
                    callbacks=[early_stopper(epochs, mode='strict')])

    return out, model
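
# The upper-case parameter keys are constants defined elsewhere in the
# original module. A plausible sketch (names taken from the lookups above,
# values purely illustrative):
BATCH_SIZE = 'batch_size'
EPOCHS = 'epochs'
ACTIVATION = 'activation'
BIAS_INITIALIZER = 'bias_initializer'
KERNEL_INITIALIZER = 'kernel_initializer'
BIAS_REGULARIZER = 'bias_regularizer'
HIDDEN_LAYER = 'hidden_layer'
DROPOUT_RATE = 'dropout_rate'

p = {BATCH_SIZE: [32, 64],
     EPOCHS: [200],
     ACTIVATION: ['relu'],
     BIAS_INITIALIZER: ['zeros'],
     KERNEL_INITIALIZER: ['glorot_uniform'],
     BIAS_REGULARIZER: [None],
     HIDDEN_LAYER: [[64, 32], [128, 64, 32]],  # widths; element 0 is the first layer
     DROPOUT_RATE: [0.1, 0.3]}
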
def numerai_model(x_train, y_train, x_val, y_val, params):
    print(params)

    model = Sequential()

    ## initial layer
    model.add(
        Dense(params['first_neuron'],
              input_dim=x_train.shape[1],
              activation='relu',
              kernel_initializer=params['kernel_initializer']))
    model.add(Dropout(params['dropout']))

    ## hidden layers
    for i in range(params['hidden_layers']):
        print(f"adding layer {i+1}")
        model.add(
            Dense(params['hidden_neuron'],
                  activation='relu',
                  kernel_initializer=params['kernel_initializer']))
        model.add(Dropout(params['dropout']))

    ## final layer
    model.add(
        Dense(1,
              activation=params['last_activation'],
              kernel_initializer=params['kernel_initializer']))

    model.compile(loss='binary_crossentropy',
                  optimizer=params['optimizer'],
                  metrics=['acc'])

    history = model.fit(
        x_train,
        y_train,
        validation_data=(x_val, y_val),
        batch_size=params['batch_size'],
        epochs=params['epochs'],
        # `tensorboard` is defined at module level in the original source;
        # see the sketch after this function
        callbacks=[tensorboard,
                   early_stopper(params['epochs'], patience=10)],
        verbose=0)
    return history, model
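
# `tensorboard` above is a free variable; in the original it is presumably a
# module-level callback along these lines (the log directory is illustrative):
from keras.callbacks import TensorBoard

tensorboard = TensorBoard(log_dir='./logs/numerai')
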
def emotions_model(dummyXtrain, dummyYtrain, dummyXval, dummyYval, params):

    model = Sequential()
    # No input_shape on the first layer: the model is built lazily from the
    # training data on the first fit() call (requires deferred-build support
    # in the Keras version used).
    model.add(
        Conv2D(32, (3, 3),
               padding='same',
               kernel_initializer=params['kernel_initializer']))
    model.add(Activation(params['activation_1']))
    model.add(Conv2D(params['neurons_layer_2'], (3, 3)))
    model.add(Activation(params['activation_2']))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(params['dropout']))
    model.add(Conv2D(params['neurons_layer_3'], (3, 3), padding='same'))
    model.add(Activation(params['activation_3']))
    model.add(Conv2D(params['neurons_layer_4'], (3, 3)))
    model.add(Activation(params['activation_4']))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(params['dropout']))
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation(params['activation_5']))
    model.add(Dropout(params['dropout']))
    model.add(Dense(7, activation=params['last_activation']))
    model.compile(optimizer=params['optimizer'](
        lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['loss'],
                  metrics=['accuracy'])

    history = model.fit(dummyXtrain,
                        dummyYtrain,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        validation_data=(dummyXval, dummyYval),
                        callbacks=[
                            ModelCheckpoint("conv2d_mwilchek.hdf5",
                                            monitor="val_loss",
                                            save_best_only=True),
                            early_stopper(params['epochs'], mode='strict')
                        ])

    return history, model
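
# Sketch (not from the original): ModelCheckpoint(save_best_only=True) above
# keeps the weights with the lowest val_loss, so the best model can be
# restored after the scan:
from keras.models import load_model

best_model = load_model("conv2d_mwilchek.hdf5")
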
def build_model(x_train, y_train, x_val, y_val, params):

    model = keras.Sequential()
    model.add(keras.layers.Dense(10, activation=params['activation'],
                                 input_dim=x_train.shape[1],
                                 use_bias=True,
                                 kernel_initializer='glorot_uniform',
                                 bias_initializer='zeros',
                                 kernel_regularizer=keras.regularizers.l1_l2(l1=params['l1'], l2=params['l2']),
                                 bias_regularizer=None))

    model.add(keras.layers.Dropout(params['dropout']))

    # If we want to also test for number of layers and shapes, that's possible
    hidden_layers(model, params, 1)

    # Then we finish again in the completely standard Keras way
    model.add(keras.layers.Dense(1, activation=params['activation'], use_bias=True,
                                 kernel_initializer='glorot_uniform',
                                 bias_initializer='zeros',
                                 kernel_regularizer=keras.regularizers.l1_l2(l1=params['l1'], l2=params['l2']),
                                 bias_regularizer=None))

    model.compile(optimizer=params['optimizer'](lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['losses'],
                  metrics=['mse'])

    history = model.fit(x_train, y_train,
                        validation_data=(x_val, y_val),
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        # mode='strict' is the stricter alternative here
                        callbacks=[early_stopper(epochs=params['epochs'], mode='moderate')],
                        verbose=0)

    # Finally, make sure the history object and the model are returned
    return history, model
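
# Sketch (not from the original): talos.utils.lr_normalizer rescales the
# unitless 'lr' value to the range of the given optimizer class, which is why
# several of these grids pass optimizer classes rather than strings. The
# exact scaling factors are Talos internals; treat this behavior as an
# assumption and check it against your Talos version.
from keras.optimizers import Adam, SGD
from talos.utils import lr_normalizer

for opt in (Adam, SGD):
    print(opt.__name__, lr_normalizer(1, opt))  # ~ each optimizer's default lr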