Example #1
import numpy as np
import tensorflow as tf
import prettytensor as pt

# Assumed here: train_data / test_data are 2-D NumPy arrays loaded elsewhere,
# and dim_x is the width of each flat feature vector.
dim_x = train_data.shape[1]
dim_y = 2
train_size = train_data.shape[0]
test_size = test_data.shape[0]
print("dim_x: ", dim_x, "\ntrain_size:", train_size, "\ntest_size:", test_size)

# Reshape the flat feature vectors into 4-D [N, dim_x, 1, 1] tensors so they
# can be fed to the convolutional network.
train_data = np.reshape(train_data, [train_size, dim_x, 1, 1])
test_data = np.reshape(test_data, [test_size, dim_x, 1, 1])
# Data is consumed in fixed-size batches, so both splits must divide evenly.
assert train_size % batch_size == 0, '#TrainSize % #BatchSize != 0'
assert test_size % batch_size == 0, '#TestSize % #BatchSize != 0'
train_batch_num = train_size // batch_size
test_batch_num = test_size // batch_size

# Placeholders for the 4-D inputs and the (one-hot) labels with dim_y classes.
data_placeholder = tf.placeholder(tf.float32, [None, dim_x, 1, 1])
labels_placeholder = tf.placeholder(tf.float32, [None, dim_y])

# lenet5 (defined elsewhere) is assumed to return a PrettyTensor result that
# exposes a softmax head (result.softmax) and a training loss (result.loss).
result = lenet5(data_placeholder, labels_placeholder, dim_y)
pred_logit = result.softmax.softmax_activation()
accuracy_tensor = result.softmax.evaluate_classifier(labels_placeholder,
                                                     phase=pt.Phase.test)
precision_tensor, recall_tensor = result.softmax.evaluate_precision_recall(
    labels_placeholder, phase=pt.Phase.test)
_, auroc_tensor = tf.metrics.auc(labels_placeholder, pred_logit)
_, aupr_tensor = tf.metrics.auc(labels_placeholder, pred_logit, curve="PR")
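# Note: evaluate_classifier / evaluate_precision_recall are PrettyTensor's
# test-phase evaluation helpers, while tf.metrics.auc returns a
# (value, update_op) pair; the update ops are kept here, and the metric's local
# variables must be initialized with tf.local_variables_initializer() before use.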

optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_op = pt.apply_optimizer(optimizer, losses=[result.loss])
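# pt.apply_optimizer applies the optimizer to the given losses and returns a
# single training op.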

#save_path = '/data/cdy/ykq/checkpoints/model_conv2d_{}-{}.cpkt'.format(
#            learning_rate, time.strftime("%m-%d-%H%M%S", time.localtime()))
#print("model has been saved: " + save_path)
#runner = pt.train.Runner(save_path)
clatent_dim = head_context_trans.shape[-1]
print("clatent_dim: ", clatent_dim)
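# clatent_dim is the width of the context embeddings (last dimension of
# head_context_trans); it sizes the concatenated input built below.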

# Concatenate the head/tail entity, hierarchy, and context representations along
# axis 1 into one flat feature vector per example, then reshape to 4-D so it can
# be fed to the convolutional network.
input_data = tf.reshape(
    tf.concat([
        head_trans, head_hierarchy_placeholder, head_context_trans, tail_trans,
        tail_hierarchy_placeholder, tail_context_trans
    ], 1),
    [batch_size, latent_dim * 2 + hierarchy_dim * 2 + clatent_dim * 2, 1, 1])
# input_data = tf.reshape(tf.concat([head_trans, head_context_trans,
#                                    tail_trans, tail_context_trans], 1),
#                         [batch_size, latent_dim*2+clatent_dim*2, 1, 1])
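# The commented-out variant above drops the hierarchy embeddings from the input.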

print("input_data shape: ", input_data.shape)

result = lenet5(input_data, labels_placeholder, dim_y)
# print("@@@@@result:", result.softmax.shape)

pred_logit = result.softmax.softmax_activation()
accuracy_tensor = result.softmax.evaluate_classifier(labels_placeholder,
                                                     phase=pt.Phase.test)
precision_tensor, recall_tensor = result.softmax.evaluate_precision_recall(
    labels_placeholder, phase=pt.Phase.test)
_, auroc_tensor = tf.metrics.auc(labels_placeholder, pred_logit)
_, aupr_tensor = tf.metrics.auc(labels_placeholder, pred_logit, curve="PR")

optimizer = tf.train.GradientDescentOptimizer(learning_rate)
train_op = pt.apply_optimizer(optimizer, losses=[result.loss])
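# This second lenet5 call rebinds result, pred_logit, the metric tensors, and
# train_op to the graph built on input_data, superseding the data_placeholder
# version defined above.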

runner = pt.train.Runner()
best_f1 = 0
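# F1 = 2 * precision * recall / (precision + recall); best_f1 presumably tracks
# the highest F1 reached during the evaluation loop that follows (not shown here).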