"f1": 0 } start = time.time() train_head_hierarchy = train_head_deepwalk train_tail_hierarchy = train_tail_deepwalk test_head_hierarchy = test_head_deepwalk test_tail_hierarchy = test_tail_deepwalk with tf.Session() as sess: sess.run(tf.local_variables_initializer()) sess.run(tf.global_variables_initializer()) for epoch in range(epochs): train_head_data, train_head_hierarchy, train_head_context,\ train_tail_data, train_tail_hierarchy, train_tail_context, \ train_labels = permute_dataset((train_head_data, train_head_hierarchy, train_head_context, train_tail_data, train_tail_hierarchy, train_tail_context, train_labels)) runner.train_model( train_op, result.loss, train_batch_num, feed_vars=(head_data_placeholder, head_hierarchy_placeholder, head_context_placeholder, tail_data_placeholder, tail_hierarchy_placeholder, tail_context_placeholder, labels_placeholder), feed_data=pt.train.feed_numpy(batch_size, train_head_data, train_head_hierarchy, train_head_context, train_tail_data, train_tail_hierarchy, train_tail_context, train_labels),
        phase=pt.Phase.test)

optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_op = pt.apply_optimizer(optimizer, losses=[result.loss])

# save_path = '/data/cdy/ykq/checkpoints/model_conv2d_{}-{}.cpkt'.format(
#     learning_rate, time.strftime("%m-%d-%H%M%S", time.localtime()))
# print("model has been saved: " + save_path)
# runner = pt.train.Runner(save_path)
runner = pt.train.Runner()

best_accuracy = 0
best_epoch = 0

with tf.Session() as sess:
    # print(epochs)
    for epoch in range(epochs):
        # Shuffle the training data and labels with the same permutation each epoch.
        train_data, train_labels = permute_dataset((train_data, train_labels))
        # The best model is not saved here.
        runner.train_model(train_op, result.loss, num_batches,
                           feed_vars=(data_placeholder, labels_placeholder),
                           feed_data=pt.train.feed_numpy(
                               train_batch_size, train_data, train_labels))
        classification_accuracy = runner.evaluate_model(
            accuracy, num_batches,
            feed_vars=(data_placeholder, labels_placeholder),
            feed_data=pt.train.feed_numpy(test_batch_size, test_data, test_labels))
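
# Sketch only: the loop above never updates best_accuracy / best_epoch or checkpoints the
# weights (see the comment "The best model is not saved here"). One possible addition inside
# the epoch loop, assuming classification_accuracy comes back as a plain scalar; the Saver
# would be built once before the loop, and the checkpoint path is hypothetical.
saver = tf.train.Saver()
if classification_accuracy > best_accuracy:
    best_accuracy = classification_accuracy
    best_epoch = epoch + 1
    saver.save(sess, '/data/cdy/ykq/checkpoints/model_conv2d_best.ckpt')  # hypothetical path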
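
# permute_dataset is used by every training loop in this listing but its definition is not
# shown here. A minimal sketch of what it is assumed to do (the real helper may differ):
# apply one shared random permutation to every array in the tuple so rows stay aligned.
import numpy as np

def permute_dataset(arrays):
    """Shuffle all arrays with the same random permutation, keeping rows aligned."""
    permutation = np.random.permutation(len(arrays[0]))
    return tuple(array[permutation] for array in arrays)

# e.g. train_data, train_labels = permute_dataset((train_data, train_labels))
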
best_epoch = 0

start = time.time()

# DeepWalk embeddings serve as the hierarchy features, word2vec embeddings as the word
# features, for the head and tail inputs.
train_head_hierarchy = train_head_deepwalk
train_tail_hierarchy = train_tail_deepwalk
test_head_hierarchy = test_head_deepwalk
test_tail_hierarchy = test_tail_deepwalk
train_head_word = train_head_word2vec
train_tail_word = train_tail_word2vec
test_head_word = test_head_word2vec
test_tail_word = test_tail_word2vec

with tf.Session() as sess:
    for epoch in range(epochs):
        # Shuffle every feature array and the labels with one shared permutation per epoch.
        train_head_data, train_head_word, train_head_hierarchy, \
            train_tail_data, train_tail_word, train_tail_hierarchy, \
            train_labels = permute_dataset((train_head_data, train_head_word,
                                            train_head_hierarchy, train_tail_data,
                                            train_tail_word, train_tail_hierarchy,
                                            train_labels))
        runner.train_model(
            train_op, result.loss, train_batch_num,
            feed_vars=(head_data_placeholder, head_word2vec_placeholder,
                       head_hierarchy_placeholder, tail_data_placeholder,
                       tail_word2vec_placeholder, tail_hierarchy_placeholder,
                       labels_placeholder),
            feed_data=pt.train.feed_numpy(batch_size, train_head_data,
                                          train_head_word, train_head_hierarchy,
                                          train_tail_data, train_tail_word,
                                          train_tail_hierarchy, train_labels),