Example #1
def cervix_model(x_train, y_train, x_val, y_val, params):

    # imports live inside the function so the model is self-contained;
    # newer talos versions expose these metrics under *_acc names (see Example #5)
    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from talos.model import lr_normalizer, early_stopper, hidden_layers
    from talos.metrics.keras_metrics import (fmeasure, recall, precision,
                                             matthews_correlation)

    model = Sequential()
    model.add(Dense(params['first_neuron'],
                    input_dim=x_train.shape[1],
                    activation='relu'))

    model.add(Dropout(params['dropout']))

    hidden_layers(model, params, 1)

    model.add(Dense(1, activation=params['last_activation']))

    model.compile(optimizer=params['optimizer'](lr=lr_normalizer(params['lr'],
                                                                 params['optimizer'])),
                  loss=params['loss'],
                  metrics=['acc',
                           fmeasure,
                           recall,
                           precision,
                           matthews_correlation])

    results = model.fit(x_train, y_train,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        verbose=0,
                        validation_data=[x_val, y_val],
                        callbacks=[early_stopper(params['epochs'],
                                                 mode='moderate',
                                                 monitor='val_fmeasure')])

    return results, model
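For orientation, a model function in this form is normally handed to talos.Scan together with a parameter dictionary whose keys match the lookups above. The grid below is purely illustrative (hypothetical values, plus the extra 'hidden_layers' key that the hidden_layers() helper reads), not the original author's experiment:

# a minimal, hypothetical scan setup for cervix_model; the values are
# illustrative and x/y stand for the prepared feature and label arrays
import talos as ta
from keras.optimizers import Adam, Nadam

p = {'first_neuron': [32, 64],
     'hidden_layers': [1, 2],        # read by the hidden_layers() helper
     'dropout': [0.0, 0.25],
     'optimizer': [Adam, Nadam],
     'lr': [1, 2],                   # rescaled per optimizer by lr_normalizer
     'loss': ['binary_crossentropy'],
     'last_activation': ['sigmoid'],
     'batch_size': [16, 32],
     'epochs': [100]}

scan = ta.Scan(x=x, y=y, params=p, model=cervix_model)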
Example #2
def iris_model(x_train, y_train, x_val, y_val, params):

    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from talos.model import lr_normalizer, early_stopper, hidden_layers

    # note how instead of passing the value, we pass a dictionary entry
    model = Sequential()
    model.add(Dense(params['first_neuron'],
                    input_dim=x_train.shape[1],
                    activation='relu'))

    # same here, just passing a dictionary entry
    model.add(Dropout(params['dropout']))

    # with this call we can create any number of hidden layers
    hidden_layers(model, params, y_train.shape[1])

    # again, instead of the activation name, we have a dictionary entry
    model.add(Dense(y_train.shape[1],
                    activation=params['last_activation']))

    # here we are using the learning rate normalizer
    model.compile(optimizer=params['optimizer'](lr=lr_normalizer(params['lr'],
                                                                 params['optimizer'])),
                  loss=params['losses'],
                  metrics=['acc'])

    # here we are also using the early_stopper function for a callback
    out = model.fit(x_train, y_train,
                    batch_size=params['batch_size'],
                    epochs=params['epochs'],
                    verbose=0,
                    validation_data=[x_val, y_val],
                    callbacks=[early_stopper(params['epochs'], mode=[1, 1])])

    return out, model
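The mode=[1, 1] argument above uses early_stopper's list form, where the list is read as [min_delta, patience]; the string presets instead derive the patience internally. A small sketch contrasting the two forms (preset internals may differ between talos versions):

# sketch: the two ways early_stopper's mode is used in these examples
from talos.model import early_stopper

es_preset = early_stopper(100, mode='moderate')   # patience derived from epochs
es_custom = early_stopper(100, mode=[1, 1])       # [min_delta, patience]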
Example #3
def talos_model(x_train, y_train, x_val, y_val, parameters):

    from keras.models import Sequential
    from keras.layers import Dense, Dropout
    from talos.model import early_stopper

    model = Sequential()

    model.add(Dense(parameters['neurons_HL1'],
                    input_shape=(x_train.shape[1],),
                    activation=parameters['activation_1'], use_bias=True))

    model.add(Dropout(parameters['dropout1']))

    model.add(Dense(parameters['neurons_HL2'],
                    activation=parameters['activation_2'], use_bias=True))

    # note: the same 'dropout1' rate is reused for the second dropout layer
    model.add(Dropout(parameters['dropout1']))
    
    # an optional third hidden layer, added only when 'neurons_HL3' is non-zero
    if parameters['neurons_HL3']:
        model.add(Dense(parameters['neurons_HL3'],
                        activation=parameters['activation_3'], use_bias=True))

    model.add(Dense(1, activation='relu'))

    model.compile(optimizer=parameters['optimizer'],
                  loss=parameters['loss-functions'],
                  metrics=['mse', 'mae'])

    history = model.fit(x_train, y_train,
                        batch_size=parameters['batch_size'],
                        epochs=parameters['epochs'],
                        verbose=0,
                        validation_data=[x_val, y_val],
                        callbacks=[early_stopper(epochs=parameters['epochs'],
                                                 mode='moderate',
                                                 monitor='val_loss',
                                                 patience=25)])
    
    return history, model
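Since the third hidden layer is only added when parameters['neurons_HL3'] is truthy, putting 0 into that key's search space lets a single scan compare two- and three-layer variants. A hypothetical grid matching the keys above:

# hypothetical parameter grid for talos_model; 0 in 'neurons_HL3'
# makes the if-branch skip the third hidden layer for that permutation
parameters = {'neurons_HL1': [64, 128],
              'activation_1': ['relu'],
              'dropout1': [0.1, 0.3],
              'neurons_HL2': [32, 64],
              'activation_2': ['relu'],
              'neurons_HL3': [0, 32],
              'activation_3': ['relu'],
              'optimizer': ['adam'],
              'loss-functions': ['mse'],
              'batch_size': [32],
              'epochs': [200]}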
Example #4
def create_nn2_hyperas(x_train, x_test, y_train, y_test, params):

    from keras.models import Sequential
    from keras.layers import Dense
    from keras.optimizers import Adam
    from talos.model import early_stopper

    seq = Sequential()
    # Keras 2 API: 'units' and 'kernel_initializer' replace 'output_dim' and 'init'
    seq.add(Dense(params['n_nodes'], kernel_initializer="uniform",
                  activation="relu", input_dim=11))
    seq.add(Dense(1, kernel_initializer="uniform", activation="sigmoid"))
    seq.compile(optimizer=Adam(lr=params['learning_rate']),
                loss="binary_crossentropy",  # vs categorical_crossentropy
                metrics=['accuracy'])

    epochs = 200
    history = seq.fit(x_train, y_train,
                      verbose=False,
                      batch_size=params["batch_size"],
                      epochs=epochs,
                      validation_data=[x_test, y_test],
                      callbacks=[early_stopper(epochs, mode=[0, 50])])

    return history, seq
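Despite the hyperas-flavoured name, the function follows the same model-function contract as the talos examples, and mode=[0, 50] maps to min_delta=0 with a patience of 50 epochs. A hypothetical smoke test with random data (shapes chosen to match input_dim=11):

# hypothetical smoke test; random data sized to match input_dim=11
import numpy as np

x = np.random.rand(200, 11)
y = np.random.randint(0, 2, (200, 1))
history, model = create_nn2_hyperas(x[:160], x[160:], y[:160], y[160:],
                                    {'n_nodes': 16,
                                     'learning_rate': 0.001,
                                     'batch_size': 32})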
Example #5
def cervical_cancer(x_train, y_train, x_val, y_val, params):

    from keras.models import Sequential
    from keras.layers import Dropout, Dense
    from talos.model import lr_normalizer, early_stopper, hidden_layers

    from talos.metrics.keras_metrics import matthews_correlation_acc, precision_acc
    from talos.metrics.keras_metrics import recall_acc, fmeasure_acc

    model = Sequential()
    model.add(
        Dense(params['first_neuron'],
              input_dim=x_train.shape[1],
              activation='relu'))

    model.add(Dropout(params['dropout']))

    hidden_layers(model, params, 1)

    model.add(Dense(1, activation=params['last_activation']))

    model.compile(optimizer=params['optimizer'](
        lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['losses'],
                  metrics=[
                      'acc', fmeasure_acc, recall_acc, precision_acc,
                      matthews_correlation_acc
                  ])

    results = model.fit(x_train,
                        y_train,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        verbose=0,
                        validation_data=[x_val, y_val],
                        callbacks=[
                            # monitor matches the metric function's name, fmeasure_acc
                            early_stopper(params['epochs'],
                                          mode='moderate',
                                          monitor='val_fmeasure_acc')
                        ])

    return results, model
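As with Example #1, this function would be driven through talos.Scan; the sketch below assumes a parameter dictionary p with the keys looked up above (note 'losses' here instead of 'loss') and shows how the per-permutation results, including the custom metrics, can be inspected:

# a sketch, assuming p is a params dict with the keys used above
import talos as ta

scan = ta.Scan(x=x, y=y, params=p, model=cervical_cancer)
print(scan.data.head())  # one row per permutation, with the custom metrics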