Example #1
0
def cervix_model(x_train, y_train, x_val, y_val, params):
    """Build, compile and fit a binary classifier for a Talos scan round.

    Parameters
    ----------
    x_train, y_train : training features and labels.
    x_val, y_val : validation features and labels.
    params : dict of hyperparameters selected by Talos for this round
        (first_neuron, dropout, optimizer, lr, loss, batch_size, epochs,
        last_activation, ...).

    Returns
    -------
    (results, model) : the Keras ``History`` object and the fitted model —
        the pair a Talos model function is expected to return.
    """
    model = Sequential()
    # Input layer: width comes from the parameter dictionary, input
    # dimensionality from the training data itself.
    model.add(Dense(params['first_neuron'],
                    input_dim=x_train.shape[1],
                    activation='relu'))

    model.add(Dropout(params['dropout']))

    # Talos helper that appends the configured number of hidden layers;
    # the trailing 1 is the width passed through to the layer builder.
    hidden_layers(model, params, 1)

    # Single output unit for binary classification.
    model.add(Dense(1, activation=params['last_activation']))

    model.compile(optimizer=params['optimizer'](lr=lr_normalizer(params['lr'], params['optimizer'])),
                  loss=params['loss'],
                  metrics=['acc',
                           fmeasure,
                           recall,
                           precision,
                           matthews_correlation])

    # BUG FIX: Keras expects `callbacks` to be a *list* of callback
    # objects; the original passed the bare callback, which fails at fit
    # time.  `validation_data` is also passed as the documented tuple.
    results = model.fit(x_train, y_train,
                        batch_size=params['batch_size'],
                        epochs=params['epochs'],
                        verbose=0,
                        validation_data=(x_val, y_val),
                        callbacks=[early_stopper(params['epochs'],
                                                 mode='moderate',
                                                 monitor='val_fmeasure')])

    return results, model
Example #2
0
def iris_model(x_train, y_train, x_val, y_val, params):
    """Build, compile and fit a multi-class classifier for a Talos scan.

    Parameters
    ----------
    x_train, y_train : training features and one-hot labels.
    x_val, y_val : validation features and one-hot labels.
    params : dict of hyperparameters selected by Talos for this round.

    Returns
    -------
    (out, model) : the Keras ``History`` object and the fitted model.
    """
    # note how instead of passing the value, we pass a dictionary entry
    model = Sequential()
    model.add(Dense(params['first_neuron'],
                    input_dim=x_train.shape[1],
                    activation='relu'))

    # same here, just passing a dictionary entry
    model.add(Dropout(params['dropout']))

    # with this call we can create any number of hidden layers; the last
    # argument is the output width (number of classes) forwarded to the
    # layer builder
    hidden_layers(model, params, y_train.shape[1])

    # again, instead of the activation name, we have a dictionary entry
    model.add(Dense(y_train.shape[1],
                    activation=params['last_activation']))

    # here we are using a learning rate boundary via lr_normalizer
    model.compile(optimizer=params['optimizer'](lr=lr_normalizer(params['lr'],
                  params['optimizer'])),
                  loss=params['losses'],
                  metrics=['acc'])

    # here we are also using the early_stopper function for a callback.
    # BUG FIX: Keras expects `callbacks` to be a *list*; the original
    # passed the bare callback object.  validation_data is passed as the
    # documented tuple form.
    out = model.fit(x_train, y_train,
                    batch_size=params['batch_size'],
                    epochs=params['epochs'],
                    verbose=0,
                    validation_data=(x_val, y_val),
                    callbacks=[early_stopper(params['epochs'], mode=[1, 1])])

    return out, model
Example #3
0
    def model(dummyXtrain, dummyYtrain, dummyXval, dummyYval, params):
        """Talos model function: build a small CNN from `params`, compile
        it with the globally configured optimizer/loss, and train it on
        the module-level generator datasets.

        The four dummy* arguments exist only to satisfy the Talos model
        function signature; the actual data comes from the surrounding
        scope's `training_set` / `validation_set` generators.

        Returns the (history, model) pair Talos expects.
        """
        conv_shape = _create_conv_shape_(params)

        model = Sequential()

        # Stack the convolutional layers described by conv_shape; only
        # the very first one needs the explicit input shape.
        for idx, (n_filters, kernel) in enumerate(conv_shape):
            extra = {'input_shape': params['input_shape']} if idx == 0 else {}
            model.add(Conv2D(n_filters, kernel, **extra))
            model.add(Activation('relu'))

        model.add(Flatten())

        # Talos helper appends the configured dense hidden layers.
        hidden_layers(model, params, params['last_neuron'])

        model.add(Dense(4))  #4 is the shape of the data
        model.add(Activation('softmax'))

        # Compilation settings come from the module-level config object,
        # not from the per-round params.
        global config
        optimizer = config.optimizer(**config.optimizer_parameters)
        model.compile(loss=config.loss,
                      optimizer=optimizer,
                      metrics=config.metric)

        # Propagate the round's batch size into both generators.
        training_set.batch_size = params['batch_size']
        validation_set.batch_size = params['batch_size']

        model.summary()

        n_train = My_Custom_Generator.getNumber(training_set)
        n_test = My_Custom_Generator.getNumber(validation_set)
        print('number of training images: ', n_train)
        print('number of val images: ', n_test)

        history = model.fit_generator(
            training_set,
            validation_data=validation_set,
            epochs=params['epoch'],
            verbose=int(params['verbose']),
        )
        #TODO: save model as h5 and history using round as naming index? maybe...
        return history, model
Example #4
0
def cervical_cancer(x_train, y_train, x_val, y_val, params):
    """Talos model function for the cervical-cancer dataset.

    Builds a dropout-regularised MLP with a single sigmoid-style output,
    compiles it with the optimizer/learning-rate pair drawn from
    `params`, and fits it with early stopping monitored on the
    validation f-measure.

    Returns the (history, model) pair Talos expects.
    """
    from keras.models import Sequential
    from keras.layers import Dropout, Dense
    from talos.model import lr_normalizer, early_stopper, hidden_layers

    from talos.metrics.keras_metrics import matthews_correlation_acc, precision_acc
    from talos.metrics.keras_metrics import recall_acc, fmeasure_acc

    # Input layer sized from the data, width from the param dict.
    net = Sequential()
    net.add(Dense(params['first_neuron'],
                  input_dim=x_train.shape[1],
                  activation='relu'))
    net.add(Dropout(params['dropout']))

    # Talos helper appends the configured hidden layers (width 1).
    hidden_layers(net, params, 1)

    # Single output unit for the binary target.
    net.add(Dense(1, activation=params['last_activation']))

    # lr_normalizer rescales the learning rate for the chosen optimizer.
    chosen_optimizer = params['optimizer'](
        lr=lr_normalizer(params['lr'], params['optimizer']))
    net.compile(optimizer=chosen_optimizer,
                loss=params['losses'],
                metrics=['acc',
                         fmeasure_acc,
                         recall_acc,
                         precision_acc,
                         matthews_correlation_acc])

    stopper = early_stopper(params['epochs'],
                            mode='moderate',
                            monitor='val_fmeasure')
    history = net.fit(x_train,
                      y_train,
                      batch_size=params['batch_size'],
                      epochs=params['epochs'],
                      verbose=0,
                      validation_data=[x_val, y_val],
                      callbacks=[stopper])

    return history, net
Example #5
0
    def fake_news_model(self, x_train, y_train, x_val, y_val, params):
        """Talos model function: a small MLP over the feature columns.

        The input width is derived from ``self.HEADERS`` (all columns
        minus the label), everything else — activations, dropout rate,
        optimizer, learning rate, batch size, epochs — comes from the
        per-round ``params`` dictionary.

        Returns the (history, model) pair Talos expects.
        """
        net = Sequential()
        # One input column per header, excluding the label column.
        input_width = len(self.HEADERS) - 1
        net.add(Dense(10,
                      input_dim=input_width,
                      activation=params['activation'],
                      kernel_initializer='normal'))
        net.add(Dropout(params['dropout']))

        # Talos helper appends the configured hidden layers (width 1).
        hidden_layers(net, params, 1)

        # Single output unit for the binary fake/real decision.
        net.add(Dense(1,
                      activation=params['last_activation'],
                      kernel_initializer='normal'))

        chosen_optimizer = params['optimizer'](
            lr=lr_normalizer(params['lr'], params['optimizer']))
        net.compile(loss=params['losses'],
                    optimizer=chosen_optimizer,
                    metrics=['acc'])

        history = net.fit(x_train, y_train,
                          validation_data=[x_val, y_val],
                          batch_size=params['batch_size'],
                          epochs=params['epochs'],
                          verbose=0)

        return history, net