# Example #1
# (stray notebook cell output: 0)
# Training hyper-parameters.
random_embedding = True   # initialize embeddings randomly (vs. pretrained)
hidden_units = 128        # width of the hidden layer
learning_rate = 0.001     # optimizer step size
batch_size = 1024         # examples per mini-batch
epochs = 50               # full passes over the training data

model = DSSM(sequence_length, vocab_size)()

# Train the model with mini-batch SGD under a TF1 session.
with tf.Session() as sess:
    # All variables must be initialized before any sess.run call.
    sess.run(tf.global_variables_initializer())

    def train_step(train_q, train_h, y):
        """Run one optimization step on a batch and print its loss/accuracy.

        Args:
            train_q: batch of query inputs fed to ``model.query``.
            train_h: batch of document inputs fed to ``model.doc``.
            y: batch of labels fed to ``model.y``.
        """
        feed_dict = {
            model.query: train_q,
            model.doc: train_h,
            model.y: y,
            model.keep_prob: 0.5,  # dropout keep-probability during training
        }
        # BUG FIX: the original ran only [model.loss, model.acc], so the
        # optimizer was never executed and no weights were ever updated —
        # the loop merely re-evaluated an untrained model. Run the training
        # op together with the metrics in a single call.
        # NOTE(review): assumes the model exposes its optimizer step as
        # `model.train_op` — confirm against the DSSM class definition.
        _, loss, acc = sess.run([model.train_op, model.loss, model.acc],
                                feed_dict)
        print(loss, acc)

    # NOTE(review): the first element is named `train_p` here but unpacked as
    # the query batch below — presumably p == query; verify upstream naming.
    batches = data_handler.batch_iter(list(zip(train_p, train_h, train_y)),
                                      batch_size,
                                      epochs)
    for batch in batches:
        train_q_batch, train_h_batch, train_y_batch = zip(*batch)
        train_step(train_q_batch, train_h_batch, train_y_batch)