def create_model(sequence_length,
                 feature_dim,
                 RANDOM_STATE,
                 hd,
                 cell_type = 'RNN',
                 dropout = 0.2):
    """Build and compile a stacked SimpleRNN/LSTM classifier.

    Arguments:
    sequence_length -- number of timesteps per input sequence
    feature_dim -- input feature dimension; also used as the output width
    RANDOM_STATE -- seed value stashed on the returned model for bookkeeping
    hd -- per-layer unit counts; entries equal to 0 are skipped, so e.g.
          [20, 50, 0] builds a 2-layer network with 20 and 50 units
    cell_type -- 'RNN' for SimpleRNN cells, 'LSTM' for LSTM cells
    dropout -- dropout rate applied after every recurrent layer (0 <= rate < 1)

    Returns:
    A compiled Keras model (Adam optimizer with its default learning rate).

    Raises:
    ValueError -- if cell_type is neither 'RNN' nor 'LSTM'.
    """
    if cell_type == 'LSTM':
        RNNobj = LSTM
    elif cell_type == 'RNN':
        RNNobj = SimpleRNN
    else:
        # Fail fast with a clear message instead of a NameError on RNNobj.
        raise ValueError("cell_type must be 'RNN' or 'LSTM', got %r" % cell_type)

    # Keep only the active layer sizes. Iterating the sequence directly
    # (rather than `np.sum(hd != 0)`) works for both lists and numpy arrays;
    # a plain list compared with `!= 0` would collapse to a single bool.
    layer_units = [int(u) for u in hd if u != 0]

    # Build the recurrent stack: only the first layer declares input_shape,
    # and every layer except the last must return full sequences so the
    # next recurrent layer receives a 3-D input.
    model = Sequential()
    for i, units in enumerate(layer_units):
        kwargs = {'units': units}
        if i == 0:
            kwargs['input_shape'] = (sequence_length, feature_dim)
        if i < len(layer_units) - 1:
            kwargs['return_sequences'] = True
        model.add(RNNobj(**kwargs))
        model.add(Dropout(dropout))

    # Output layer producing per-class probabilities.
    model.add(Dense(feature_dim, activation='sigmoid'))
    loss = 'binary_crossentropy' if feature_dim == 2 else 'categorical_crossentropy'
    model.compile(loss = loss,
                  optimizer = 'adam',
                  metrics = ['accuracy'])
    # Record the seed on the model object so downstream code can read it back.
    model.RANDOM_STATE = RANDOM_STATE
    return model
def create_model(hd=(10, 0, 0),
                 sequence_length=30,
                 feature_dim=4,
                 output_dim=2,
                 RANDOM_STATE=10,
                 cell_type='RNN',
                 dropout=0.2):
    """Build and compile a stacked SimpleRNN/LSTM classifier.

    Arguments:
    hd -- per-layer unit counts; entries equal to 0 are skipped, so e.g.
          (20, 50, 0) builds a 2-layer network with 20 and 50 units.
          Default is an immutable tuple to avoid the shared mutable-default
          pitfall of the original list default.
    sequence_length -- number of timesteps per input sequence
    feature_dim -- input feature dimension
    output_dim -- number of output classes
    RANDOM_STATE -- seed value stashed on the returned model for bookkeeping
    cell_type -- 'RNN' for SimpleRNN cells, 'LSTM' for LSTM cells
    dropout -- dropout rate applied after every recurrent layer (0 <= rate < 1)

    Returns:
    A compiled Keras model (Adam optimizer with its default learning rate).

    Raises:
    ValueError -- if cell_type is neither 'RNN' nor 'LSTM'.
    """
    if cell_type == 'LSTM':
        RNNobj = LSTM
    elif cell_type == 'RNN':
        RNNobj = SimpleRNN
    else:
        # Fail fast with a clear message instead of a NameError on RNNobj.
        raise ValueError("cell_type must be 'RNN' or 'LSTM', got %r" % cell_type)

    # Keep only the active layer sizes. Iterating the sequence directly
    # works for lists, tuples and numpy arrays alike; the original
    # `np.sum(hd != 0)` collapsed a plain list to a single bool and
    # always reported one layer.
    layer_units = [int(u) for u in hd if u != 0]

    # Build the recurrent stack: only the first layer declares input_shape,
    # and every layer except the last must return full sequences so the
    # next recurrent layer receives a 3-D input.
    model = Sequential()
    for i, units in enumerate(layer_units):
        kwargs = {'units': units}
        if i == 0:
            kwargs['input_shape'] = (sequence_length, feature_dim)
        if i < len(layer_units) - 1:
            kwargs['return_sequences'] = True
        model.add(RNNobj(**kwargs))
        model.add(Dropout(dropout))

    # Output layer producing per-class probabilities.
    model.add(Dense(output_dim, activation='sigmoid'))
    loss = 'binary_crossentropy' if output_dim == 2 else 'categorical_crossentropy'
    model.compile(loss=loss, optimizer='adam', metrics=['accuracy'])
    # Record the seed on the model object so downstream code can read it back.
    model.RANDOM_STATE = RANDOM_STATE
    return model
# Beispiel #3 (German: "Example #3") -- stray scrape artifact; commented out
# so the module parses.
# 0
def create_model(sequence_length,
                 feature_dim,
                 RANDOM_STATE,
                 UNITS,
                 cell_type='RNN',
                 dropout=0.2):
    '''
    Arguments:
    sequence_length - how many trials back to look
    feature_dim - how big the vocabulary space is; also the output width
    RANDOM_STATE - seed value stashed on the returned model for bookkeeping
    cell_type - whether to include simple RNN ('RNN') or LSTM ('LSTM') cells
    dropout - 0 <= rate < 1, applied after every recurrent layer
    UNITS - sequence specifying how many units per layer; a value of 0 means
    we don't want that layer: e.g. [10 10 10] is 3 layers with 10 units
    each. [20 50 0] is a 2-layer network with 20 units in the first layer and
    50 units in the second layer.

    output:
    compiled model, with ADAM optimizer with default learning rate

    raises:
    ValueError if cell_type is neither 'RNN' nor 'LSTM'
    '''
    if cell_type == 'LSTM':
        RNNobj = LSTM
    elif cell_type == 'RNN':
        RNNobj = SimpleRNN
    else:
        # Fail fast with a clear message instead of a NameError on RNNobj.
        raise ValueError("cell_type must be 'RNN' or 'LSTM', got %r" % cell_type)

    # Keep only the active layer sizes. Iterating the sequence directly
    # works for both lists and numpy arrays; the original
    # `np.sum(UNITS != 0)` collapsed a plain list to a single bool.
    layer_units = [int(u) for u in UNITS if u != 0]

    # Build the recurrent stack: only the first layer declares input_shape,
    # and every layer except the last must return full sequences so the
    # next recurrent layer receives a 3-D input.
    model = Sequential()
    for i, units in enumerate(layer_units):
        kwargs = {'units': units}
        if i == 0:
            kwargs['input_shape'] = (sequence_length, feature_dim)
        if i < len(layer_units) - 1:
            kwargs['return_sequences'] = True
        model.add(RNNobj(**kwargs))
        model.add(Dropout(dropout))

    #give probabilities
    model.add(Dense(feature_dim, activation='sigmoid'))
    loss = 'binary_crossentropy' if feature_dim == 2 else 'categorical_crossentropy'
    model.compile(loss=loss, optimizer='adam', metrics=['accuracy'])
    # Record the seed on the model object so downstream code can read it back.
    model.RANDOM_STATE = RANDOM_STATE
    return model