Example #1
def model(activation='relu'):

    # input layer
    inputs = keras.layers.Input(shape=(400, 4))

    # layer 1
    activation = utils.activation_fn(activation)
    nn = layers.conv_layer(inputs,
                           num_filters=24,
                           kernel_size=19,
                           padding='same',
                           activation=activation,
                           dropout=0.1,
                           l2=1e-6,
                           bn=True)

    # layer 2
    nn = layers.residual_block(nn, filter_size=5, activation='relu', l2=1e-6)
    nn = keras.layers.MaxPool1D(pool_size=10)(nn)

    # layer 3
    nn = layers.conv_layer(nn,
                           num_filters=48,
                           kernel_size=7,
                           padding='same',
                           activation='relu',
                           dropout=0.3,
                           l2=1e-6,
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=5)(nn)

    # layer 4
    nn = layers.conv_layer(nn,
                           num_filters=64,
                           kernel_size=3,
                           padding='valid',
                           activation='relu',
                           dropout=0.4,
                           l2=1e-6,
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 5
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn,
                            num_units=96,
                            activation='relu',
                            dropout=0.5,
                            l2=1e-6,
                            bn=True)

    # Output layer
    logits = keras.layers.Dense(1, activation='linear', use_bias=True)(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)

    # create model
    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
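All of these examples lean on project-local helper modules (`layers`, `utils`) and assume `keras` is TensorFlow's Keras; none of those imports appear in the snippets. Below is a minimal sketch of what a `layers.conv_layer` wrapper plausibly looks like, assuming a conv -> batch-norm -> activation -> dropout ordering with optional L2 weight decay; the real helper (and the `layers.residual_block` used above) may differ.

from tensorflow import keras

def conv_layer(inputs, num_filters, kernel_size, padding='same',
               activation='relu', dropout=0.2, l2=None, bn=True,
               kernel_initializer='glorot_uniform'):
    # Hypothetical reconstruction: convolution without bias when followed
    # by batch norm, then activation and dropout.
    reg = keras.regularizers.l2(l2) if l2 is not None else None
    nn = keras.layers.Conv1D(filters=num_filters,
                             kernel_size=kernel_size,
                             padding=padding,
                             use_bias=not bn,
                             kernel_initializer=kernel_initializer,
                             kernel_regularizer=reg)(inputs)
    if bn:
        nn = keras.layers.BatchNormalization()(nn)
    nn = keras.layers.Activation(activation)(nn)
    if dropout:
        nn = keras.layers.Dropout(dropout)(nn)
    return nn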
Example #2
def model(pool_size=(25, 4), activation='relu', input_shape=200):

    if input_shape == 1000:
        multiplier = 2
    else:
        multiplier = 1

    # input layer
    inputs = keras.layers.Input(shape=(input_shape, 4))

    # layer 1
    activation = utils.activation_fn(activation)
    nn = layers.conv_layer(inputs,
                           num_filters=32 * multiplier,
                           kernel_size=19,
                           padding='same',
                           activation=activation,
                           dropout=0.1,
                           l2=1e-6,
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=pool_size[0],
                                strides=pool_size[0],
                                padding='same')(nn)

    # layer 2
    nn = layers.conv_layer(nn,
                           num_filters=124 * multiplier,
                           kernel_size=5,
                           padding='same',
                           activation='relu',
                           dropout=0.1,
                           l2=1e-6,
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=pool_size[1],
                                strides=pool_size[1],
                                padding='same')(nn)

    # layer 3
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn,
                            num_units=512 * multiplier,
                            activation='relu',
                            dropout=0.5,
                            l2=1e-6,
                            bn=True)

    # Output layer
    logits = keras.layers.Dense(12, activation='linear', use_bias=True)(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)

    # create model
    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
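A usage sketch for this variant. The compile settings below are assumptions, not from the source: the 12 sigmoid outputs suggest independent binary labels, for which binary cross-entropy is the usual loss.

from tensorflow import keras

m = model(pool_size=(25, 4), activation='relu', input_shape=200)
m.compile(optimizer=keras.optimizers.Adam(learning_rate=1e-3),
          loss='binary_crossentropy',            # multi-label: one sigmoid per task
          metrics=[keras.metrics.AUC(name='auroc')])
m.summary()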
Example #3
def model(activation='log_relu', l2_norm=True):
    def l2_reg(weight_matrix):
        return 0.1 * K.sum(K.square(weight_matrix))

    if l2_norm:
        l2_first = l2_reg
    else:
        l2_first = None

    # input layer
    inputs = keras.layers.Input(shape=(200, 4))

    # layer 1
    nn = keras.layers.Conv1D(
        filters=32,
        kernel_size=19,
        strides=1,
        activation=None,
        use_bias=False,
        padding='same',
        kernel_regularizer=l2_first,
    )(inputs)
    nn = keras.layers.BatchNormalization()(nn)
    activation = utils.activation_fn(activation)
    nn = keras.layers.Activation(activation)(nn)
    nn = keras.layers.Dropout(0.1)(nn)

    # layer 2
    nn = layers.conv_layer(
        nn,
        num_filters=48,
        kernel_size=7,  #176
        padding='same',
        activation='relu',
        dropout=0.2,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 3
    nn = layers.conv_layer(
        nn,
        num_filters=96,
        kernel_size=7,  # 44
        padding='valid',
        activation='relu',
        dropout=0.3,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 4
    nn = layers.conv_layer(
        nn,
        num_filters=128,
        kernel_size=3,  # 9
        padding='valid',
        activation='relu',
        dropout=0.4,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(
        pool_size=3,  # 3
        strides=3,
        padding='same')(nn)

    # layer 5
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn,
                            num_units=512,
                            activation='relu',
                            dropout=0.5,
                            l2=1e-6,
                            bn=True)

    # Output layer
    logits = keras.layers.Dense(12, activation='linear', use_bias=True)(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)

    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
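Keras accepts any callable mapping a weight tensor to a scalar as a kernel_regularizer, which is why the hand-rolled l2_reg above works. It is numerically equivalent to the built-in L2 penalty with the same coefficient; a quick check (assuming TensorFlow's Keras):

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import backend as K

w = tf.ones((3, 3))
custom = 0.1 * K.sum(K.square(w))          # the l2_reg defined above
builtin = keras.regularizers.l2(0.1)(w)    # built-in equivalent
assert abs(float(custom) - float(builtin)) < 1e-6  # both: 0.1 * sum(w**2) = 0.9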
Example #4
def model(activation='log_relu', l2_norm=True):

    def l2_reg(weight_matrix):
        return 0.1 * K.sum(K.square(weight_matrix))


    l2 = 1e-6
    bn = True

    dropout_block0 = 0.1
    dropout_block1 = 0.2
    dropout_block2 = 0.3
    dropout_block3 = 0.4
    dropout_block4 = 0.5

    if l2_norm:
        l2_first = l2_reg
    else:
        l2_first = None
    # input layer
    inputs = keras.layers.Input(shape=(200,4))
    activation = utils.activation_fn(activation)

    # block 1
    nn = keras.layers.Conv1D(filters=24,
                             kernel_size=19,
                             strides=1,
                             activation=None,
                             use_bias=False,
                             padding='same',
                             #kernel_initializer=keras.initializers.RandomNormal(mean=0.1, stddev=0.05),
                             kernel_regularizer=l2_first, 
                             )(inputs)        
    nn = keras.layers.BatchNormalization()(nn)
    nn = keras.layers.Activation(activation)(nn)
    nn = keras.layers.Dropout(dropout_block0)(nn)

    # block 2
    nn = layers.conv_layer(nn,
                           num_filters=32, 
                           kernel_size=7, 
                           padding='same', 
                           activation='relu', 
                           dropout=dropout_block1,
                           l2=l2, 
                           bn=bn)
    nn = keras.layers.MaxPool1D(pool_size=4, 
                                strides=4, 
                                padding='same'
                                )(nn)

    # block 3
    nn = layers.conv_layer(nn,
                           num_filters=48, 
                           kernel_size=7, 
                           padding='valid', 
                           activation='relu', 
                           dropout=dropout_block2,
                           l2=l2, 
                           bn=bn)
    nn = keras.layers.MaxPool1D(pool_size=4, 
                                strides=4, 
                                padding='same'
                                )(nn)

    # block 4
    nn = layers.conv_layer(nn,
                           num_filters=64, 
                           kernel_size=3, 
                           padding='valid', 
                           activation='relu', 
                           dropout=dropout_block3,
                           l2=l2, 
                           bn=bn)
    nn = keras.layers.MaxPool1D(pool_size=3, 
                                strides=3, 
                                padding='same'
                                )(nn)

    # Fully-connected NN
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn, num_units=96, activation='relu', dropout=dropout_block4, l2=l2, bn=bn)
    #nn = layers.dense_layer(nn, num_units=512, activation='relu', dropout=dropout_block4, l2=l2, bn=bn)

    # Output layer - additive + learned non-linearity
    logits = keras.layers.Dense(1, activation='linear', use_bias=True,  
                                 kernel_initializer='glorot_normal',
                                 bias_initializer='zeros')(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)
        
    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
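layers.dense_layer is also not shown in these snippets. A plausible reconstruction, assuming it mirrors the conv_layer sketch above (dense -> batch norm -> activation -> dropout):

from tensorflow import keras

def dense_layer(inputs, num_units, activation='relu',
                dropout=0.5, l2=None, bn=True):
    # Hypothetical reconstruction; the project's actual helper may differ.
    reg = keras.regularizers.l2(l2) if l2 is not None else None
    nn = keras.layers.Dense(num_units,
                            use_bias=not bn,
                            kernel_regularizer=reg)(inputs)
    if bn:
        nn = keras.layers.BatchNormalization()(nn)
    nn = keras.layers.Activation(activation)(nn)
    if dropout:
        nn = keras.layers.Dropout(dropout)(nn)
    return nn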
Example #5
def model(activation='relu'):

    # input layer
    inputs = keras.layers.Input(shape=(600, 4))

    activation = utils.activation_fn(activation)

    # layer 1
    nn = layers.conv_layer(
        inputs,
        num_filters=300,
        kernel_size=19,  # 192
        padding='same',
        activation=activation,
        dropout=0.2,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(pool_size=3)(nn)

    # layer 2
    nn = layers.conv_layer(
        nn,
        num_filters=200,
        kernel_size=11,  # 56
        padding='valid',
        activation='relu',
        dropout=0.2,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 3
    nn = layers.conv_layer(
        nn,
        num_filters=200,
        kernel_size=7,  # 56
        padding='valid',
        activation='relu',
        dropout=0.2,
        l2=1e-6,
        bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 4
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn,
                            num_units=1000,
                            activation='relu',
                            dropout=0.5,
                            l2=1e-6,
                            bn=True)

    # layer 5
    nn = layers.dense_layer(nn,
                            num_units=1000,
                            activation='relu',
                            dropout=0.5,
                            l2=1e-6,
                            bn=True)

    # Output layer
    logits = keras.layers.Dense(164, activation='linear', use_bias=True)(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)

    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
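A shape-check sketch for this 600-bp, 164-output variant, assuming the helper modules resolve and inputs are one-hot encoded sequences of shape (batch, 600, 4):

import numpy as np

m = model(activation='relu')
x = np.eye(4)[np.random.randint(0, 4, size=(8, 600))].astype('float32')  # random one-hot batch
preds = m.predict(x)   # sigmoid outputs in [0, 1]
print(preds.shape)     # expected: (8, 164)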
Example #6
def model(activation='relu', input_shape=200, initialization=None):

    if initialization is None:
        initialization = 'glorot_uniform'

    if input_shape == 1000:
        multiplier = 2
    else:
        multiplier = 1     

    # input layer
    inputs = keras.layers.Input(shape=(input_shape,4))
    
    # layer 1
    activation = utils.activation_fn(activation)
    nn = layers.conv_layer(inputs,
                           num_filters=32*multiplier, 
                           kernel_size=19,  #200
                           padding='same', 
                           activation=activation, 
                           kernel_initializer=initialization,
                           dropout=0.1,
                           l2=1e-6, 
                           bn=True)

    # layer 2
    nn = layers.conv_layer(nn,
                           num_filters=48*multiplier, 
                           kernel_size=7,   #176
                           padding='same', 
                           activation='relu', 
                           dropout=0.2,
                           l2=1e-6, 
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 3
    nn = layers.conv_layer(nn,
                           num_filters=96*multiplier, 
                           kernel_size=7,     # 44
                           padding='valid', 
                           activation='relu', 
                           dropout=0.3,
                           l2=1e-6, 
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=4)(nn)

    # layer 4
    nn = layers.conv_layer(nn,
                           num_filters=128*multiplier, 
                           kernel_size=3,   # 9
                           padding='valid', 
                           activation='relu', 
                           dropout=0.4,
                           l2=1e-6, 
                           bn=True)
    nn = keras.layers.MaxPool1D(pool_size=3)(nn)

    # layer 5
    nn = keras.layers.Flatten()(nn)
    nn = layers.dense_layer(nn, num_units=512*multiplier, activation='relu', 
                            dropout=0.5, l2=1e-6, bn=True)

    # Output layer 
    logits = keras.layers.Dense(12, activation='linear', use_bias=True)(nn)
    outputs = keras.layers.Activation('sigmoid')(logits)
        
    # create model
    model = keras.Model(inputs=inputs, outputs=outputs)

    return model
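Finally, utils.activation_fn (used throughout to turn names like 'log_relu' into something keras.layers.Activation accepts) is not shown either. A minimal sketch, assuming standard names pass through unchanged and custom names map to callables; the exact 'log_relu' definition here is an assumption:

import tensorflow as tf

def activation_fn(name):
    # Hypothetical reconstruction: map custom names to callables,
    # pass standard Keras activation names through unchanged.
    if name == 'log_relu':
        return lambda x: tf.math.log(tf.nn.relu(x) + 1e-6)  # assumed definition
    return name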