# Code example 1 — file: models.py, project: JaumeJBofi/Ohnologs-ML
def level_model_conv(df_meta_input, df_embeddings, name, bs):
    """Build a two-headed Keras model over a square embedding image plus
    tabular metadata.

    The convolutional branch over the embedding input emits an auxiliary
    sigmoid head ("aux"); its flattened features concatenated with the
    metadata input feed a dense funnel ending in the main sigmoid head
    ("main").  NOTE: `bs` is accepted for signature compatibility but is
    unused in this builder.
    """
    side = len(df_embeddings.columns)
    embedding_in = Input(shape=(side, side, 1), name='embedding_input')

    # Convolutional feature extractor over the embedding "image".
    conv = Conv2D(64, 5, activation='relu')(embedding_in)
    conv = MaxPool2D()(conv)
    conv = Conv2D(32, 4, activation='relu')(conv)
    conv = Conv2D(16, 3, activation='relu')(conv)
    conv_flat = Flatten()(conv)

    # Auxiliary head supervised directly on the conv features.
    aux_out = Dense(1, activation='sigmoid', name="aux")(conv_flat)

    meta_in = Input(shape=(len(df_meta_input.columns), ), name='meta_input')
    merged = keras.layers.concatenate([conv_flat, meta_in])

    # Fully-connected funnel down to the main prediction.
    for width in (128, 64, 32, 16, 8):
        merged = Dense(width, activation='relu')(merged)

    main_out = Dense(1, activation='sigmoid', name="main")(merged)

    # Model spans both inputs; aux output first, then the main output.
    model = Model(inputs=[embedding_in, meta_in],
                  outputs=[aux_out, main_out])
    model.Name = name
    return model
# Code example 2
def level_siames_merge_RNN_layer_soft(df_meta_input, df_embeddings_1,
                                      df_embeddings_2, name, bs):
    """Siamese RNN model with metadata fusion and 3-class softmax heads.

    Both embedding inputs are encoded by the shared
    get_tensor_embedding_RNN_layer helper; their element-wise difference
    feeds an auxiliary softmax head ("aux") and, concatenated with the
    metadata input, an L1-regularized dense trunk ending in the main
    softmax head ("main").  NOTE: `bs` is accepted but unused here.
    """
    left_in = Input(shape=(len(df_embeddings_1.columns), 1),
                    name='embedding_input_1')
    right_in = Input(shape=(len(df_embeddings_2.columns), 1),
                     name='embedding_input_2')

    left_feat = get_tensor_embedding_RNN_layer(left_in)
    right_feat = get_tensor_embedding_RNN_layer(right_in)

    # NOTE(review): despite the original "L1" naming, this is a plain
    # signed difference, not an absolute (L1) distance.
    diff = Lambda(lambda pair: (pair[0] - pair[1]))([left_feat, right_feat])

    aux_out = Dense(3, activation='softmax', name="aux")(diff)

    meta_in = Input(shape=(len(df_meta_input.columns), ), name='meta_input')
    x = keras.layers.concatenate([diff, meta_in])

    # Trunk spec: (units, use_bias, apply_batchnorm, dropout_rate_or_None);
    # every layer carries the same L1(0.0005) kernel regularizer.
    trunk = (
        (256, False, True, 0.3),
        (128, False, True, 0.3),
        (64, False, True, 0.2),
        (64, False, True, 0.2),
        (32, True, False, 0.1),
        (16, False, True, None),
    )
    for units, bias, norm, rate in trunk:
        x = Dense(units,
                  activation='relu',
                  use_bias=bias,
                  kernel_regularizer=regularizers.l1(0.0005))(x)
        if norm:
            x = BatchNormalization()(x)
        if rate:
            x = Dropout(rate)(x)

    main_out = Dense(3, activation='softmax', name="main")(x)

    # Main output first, then the auxiliary output.
    model = Model(
        inputs=[left_in, right_in, meta_in],
        outputs=[main_out, aux_out])
    model.Name = name
    return model
# Code example 3
def level_model_rnn_soft(df_meta_input, df_embeddings, name, rnn_size, bs):
    """GRU-over-embedding model with metadata fusion and 3-class softmax
    heads.

    A 128-unit GRU encodes the embedding sequence and feeds an auxiliary
    softmax head ("aux"); its normalized features, concatenated with the
    metadata input, pass through an L2-regularized dense funnel to the
    main softmax head ("main").  NOTE: `rnn_size` and `bs` are accepted
    for signature compatibility but unused — the GRU width is fixed at
    128 here.
    """
    emb_in = Input(shape=(len(df_embeddings.columns), 1),
                   name='embedding_input')

    feat = GRU(128,
               activation="relu",
               use_bias=False,
               recurrent_regularizer=regularizers.l1(0.0005),
               kernel_regularizer=regularizers.l1(0.005))(emb_in)
    feat = BatchNormalization()(feat)

    aux_out = Dense(3, activation='softmax', name="aux")(feat)

    meta_in = Input(shape=(len(df_meta_input.columns), ), name='meta_input')
    x = keras.layers.concatenate([feat, meta_in])

    # Trunk spec: (units, use_bias, l2_rate_or_None, apply_batchnorm,
    # dropout_rate_or_None).
    trunk = (
        (128, False, None, True, 0.3),
        (64, False, 0.001, True, 0.3),
        (32, False, 0.001, True, 0.2),
        (32, False, 0.001, True, 0.2),
        (16, True, 0.0005, False, 0.1),
        (8, False, 0.0005, True, None),
    )
    for units, bias, l2_rate, norm, rate in trunk:
        penalty = regularizers.l2(l2_rate) if l2_rate else None
        x = Dense(units,
                  activation='relu',
                  use_bias=bias,
                  kernel_regularizer=penalty)(x)
        if norm:
            x = BatchNormalization()(x)
        if rate:
            x = Dropout(rate)(x)

    main_out = Dense(3, activation='softmax', name="main")(x)

    # Model spans both inputs; aux output first, then the main output.
    model = Model(inputs=[emb_in, meta_in],
                  outputs=[aux_out, main_out])
    model.Name = name
    return model
# Code example 4 — file: models.py, project: JaumeJBofi/Ohnologs-ML
def flat_model(df_input, name):
    """Plain feed-forward binary classifier over a flat feature frame.

    Three wide stages (1024/512/256), each an L2-regularized layer with
    dropout followed by a bias-free layer with batch normalization, then
    a lightly regularized funnel down to a single sigmoid output.
    """
    inputs = Input(shape=(len(df_input.columns), ))

    x = inputs
    # Wide section: regularized+dropout layer, then bias-free+batchnorm
    # layer, at each width.
    for width in (1024, 512, 256):
        x = Dense(width,
                  activation='relu',
                  kernel_regularizer=regularizers.l2(0.005))(x)
        x = Dropout(0.2)(x)
        x = Dense(width, activation='relu', use_bias=False)(x)
        x = BatchNormalization()(x)

    # Narrow funnel with a very light L2 penalty on every layer.
    for width in (128, 64, 32, 16, 8):
        x = Dense(width,
                  activation='relu',
                  kernel_regularizer=regularizers.l2(0.00001))(x)

    predictions = Dense(1, activation='sigmoid')(x)

    model = Model(inputs=inputs, outputs=predictions)
    model.Name = name
    return model