Code example #1
import tensorflow as tf
import tflearn
from tflearn.layers.core import input_data, fully_connected, dropout
from tflearn.layers.conv import max_pool_1d
from tflearn.layers.estimator import regression


def build_DNN_Arch2(aa_length, nu_length, y_length):
    # Reset the default graph so repeated builds start from a clean state (TF1-style API).
    tf.reset_default_graph()
    # Two input branches: one for the amino-acid features and one for the nucleotide features.
    net_aa = tflearn.input_data(shape=[None, aa_length], name="amino")
    net_nu = tflearn.input_data(shape=[None, nu_length], name="nuc")
    # A fully connected layer with 4 hidden units on the nucleotide branch
    # (a matching layer for the amino branch is left disabled).
    # net_aa = tflearn.fully_connected(net_aa, 4)
    net_nu = tflearn.fully_connected(net_nu, 4)
    # Merge the two branches into a single tensor.
    net = tflearn.merge_outputs([net_aa, net_nu])
    net = tflearn.fully_connected(net, 4)
    # Output layer: y_length units, one per column of the target matrix train_y.
    net = tflearn.fully_connected(net, y_length, activation='sigmoid')
    # Regression layer: sets the optimizer (and the default loss) used during training.
    net = tflearn.regression(net, optimizer='adam')

    # Define the deep neural network model and set up TensorBoard logging.
    model = tflearn.DNN(net, tensorboard_dir='tflearn_arch2_logs', tensorboard_verbose=3)
    return model
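

# A minimal usage sketch for build_DNN_Arch2, assuming synthetic data. The array
# names (X_aa, X_nu, Y), shapes, and training hyperparameters below are
# illustrative assumptions, not values taken from the original code.
import numpy as np

X_aa = np.random.rand(32, 20).astype(np.float32)   # 32 samples, 20 amino-acid features
X_nu = np.random.rand(32, 60).astype(np.float32)   # 32 samples, 60 nucleotide features
Y = np.random.randint(0, 2, size=(32, 5)).astype(np.float32)  # 5 target columns

model = build_DNN_Arch2(aa_length=20, nu_length=60, y_length=5)
# With two input_data layers, the inputs can be passed as a list in the order
# the layers were created (amino first, then nuc).
model.fit([X_aa, X_nu], Y, n_epoch=5, batch_size=8, show_metric=True)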


def build_tflearn_ann(length, drop_out_prob=0.5):
    # drop_out_prob: keep probability for the dropout layers below (tflearn's dropout takes keep_prob).
    # 1-D input: each sample is a sequence of `length` values with a single channel.
    input_layer = input_data(shape=[None, length, 1])

    pool_layer_1 = max_pool_1d(input_layer, 10, name='pool_layer_1')
    pool_layer_2 = max_pool_1d(pool_layer_1, 5, name='pool_layer_2')
    pool_layer_3 = max_pool_1d(pool_layer_2, 5, name='pool_layer_3')
    pool_layer_4 = max_pool_1d(pool_layer_3, 5, name='pool_layer_4')

    fully_connect_1 = fully_connected(pool_layer_3,
                                      512,
                                      activation='relu',
                                      name='fully_connect_1',
                                      weights_init='xavier',
                                      regularizer="L2")

    fully_connect_2 = fully_connected(pool_layer_2,
                                      512,
                                      activation='relu',
                                      name='fully_connect_2',
                                      weights_init='xavier',
                                      regularizer="L2")

    fully_connect_3 = fully_connected(pool_layer_1,
                                      512,
                                      activation='relu',
                                      name='fully_connect_3',
                                      weights_init='xavier',
                                      regularizer="L2")

    fully_connect_4 = fully_connected(pool_layer_4,
                                      512,
                                      activation='relu',
                                      name='fully_connect_4',
                                      weights_init='xavier',
                                      regularizer="L2")
    # Merge above layers
    merge_layer = tflearn.merge_outputs(
        [fully_connect_1, fully_connect_2, fully_connect_3, fully_connect_4])
    drop_2 = dropout(merge_layer, 0.25)

    fc_layer_4 = fully_connected(drop_2,
                                 2048,
                                 activation='relu',
                                 name='fc_layer_4',
                                 regularizer='L2',
                                 weights_init='xavier',
                                 weight_decay=0.001)
    drop_3 = dropout(fc_layer_4, drop_out_prob)

    fc_layer_5 = fully_connected(drop_3,
                                 1024,
                                 activation='relu',
                                 name='fc_layer_5',
                                 regularizer='L2',
                                 weights_init='xavier',
                                 weight_decay=0.001)
    drop_4 = dropout(fc_layer_5, drop_out_prob)

    fc_layer_6 = fully_connected(drop_4,
                                 128,
                                 activation='relu',
                                 name='fc_layer_6',
                                 regularizer='L2',
                                 weights_init='xavier',
                                 weight_decay=0.001)
    drop_5 = dropout(fc_layer_6, drop_out_prob)

    # Output layer: 3-way softmax.
    output_layer = fully_connected(drop_5,
                                   3,
                                   activation='softmax',
                                   name='output')
    network = regression(output_layer,
                         optimizer='adam',
                         loss='softmax_categorical_crossentropy',
                         learning_rate=0.0001,
                         metric='Accuracy')
    model = tflearn.DNN(network)
    return model
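

# A minimal usage sketch for build_tflearn_ann, assuming synthetic data. The
# sequence length, sample count, and training settings below are illustrative
# assumptions, not values taken from the original code.
import numpy as np

tf.reset_default_graph()  # build_tflearn_ann does not reset the graph itself

length = 2000
X = np.random.rand(64, length).reshape(-1, length, 1)  # each sample: `length` values, 1 channel
Y = np.eye(3)[np.random.randint(0, 3, size=64)]        # one-hot labels for the 3 output classes

ann = build_tflearn_ann(length)
ann.fit(X, Y, n_epoch=5, batch_size=16, validation_set=0.1, show_metric=True)
predictions = ann.predict(X[:5])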