import os

import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim

import DTN


def evaluate(test_data, model_file):
    with tf.Graph().as_default() as g:
        with tf.device('cpu:0'):
            x = tf.placeholder(
                tf.float32,
                [test_data.num_examples, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE,
                 DTN.NUM_CHANNELS],
                name='x-input')
            y_ = tf.placeholder(
                tf.float32, [None, DTN.OUTPUT_NODE], name='y-input')
            _, y = DTN.inference(x, False, None, reuse=False, trainable=False)
            correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
            accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
            # accuracy_sum = tf.reduce_sum(tf.cast(correct_prediction, tf.float32))

            # Load the saved model and score the whole test set in one run.
            saver = tf.train.Saver()
            with tf.Session() as sess:
                saver.restore(sess, model_file)
                accuracy_score = sess.run(
                    accuracy,
                    feed_dict={
                        x: np.reshape(test_data.images,
                                      (test_data.num_examples, DTN.IMAGE_SIZE,
                                       DTN.IMAGE_SIZE, DTN.NUM_CHANNELS)),
                        y_: test_data.labels
                    })
                print(model_file, "test accuracy = %f" % accuracy_score)
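
# Usage sketch (an illustrative assumption, not from the original file):
# evaluate() expects a dataset object exposing .images, .labels and
# .num_examples, e.g. the MNIST Datasets from tf.contrib.learn, plus a
# checkpoint path such as the one pre_train() below writes.
def _example_evaluate_usage():
    from tensorflow.examples.tutorials.mnist import input_data

    mnist = input_data.read_data_sets('/tmp/mnist_data', one_hot=True)
    evaluate(mnist.test, os.path.join(MODEL_SAVE_PATH, PRE_MODEL_NAME))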
def pre_train(source_domain_data):
    xs = tf.placeholder(
        tf.float32,
        [PRE_BATCH_SIZE, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE, DTN.NUM_CHANNELS],
        name='source-x-input')
    ys_ = tf.placeholder(
        tf.float32, [None, DTN.OUTPUT_NODE], name='source-y-input')

    regularizer = tf.contrib.layers.l2_regularizer(RAGULARZTION_RATE)
    _, ys = DTN.inference(xs, True, regularizer, reuse=False, trainable=True)

    # ys_ is one-hot, so argmax recovers the sparse class indices expected
    # by sparse_softmax_cross_entropy_with_logits.
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=ys, labels=tf.argmax(ys_, 1))
    cross_entropy_mean = tf.reduce_mean(cross_entropy)
    # Total loss = cross entropy + the L2 terms gathered in the 'losses'
    # collection by DTN.inference.
    loss = cross_entropy_mean + tf.add_n(tf.get_collection('losses'))
    train = tf.train.AdamOptimizer(LEARNING_RATE).minimize(loss)

    saver = tf.train.Saver()
    with tf.Session() as sess:
        tf.global_variables_initializer().run()
        for i in range(PRE_TRAINING_STEPS):
            x, y = source_domain_data.next_batch(PRE_BATCH_SIZE)
            reshaped_xs = np.reshape(
                x, (PRE_BATCH_SIZE, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE,
                    DTN.NUM_CHANNELS))
            loss_value, _ = sess.run(
                [loss, train], feed_dict={xs: reshaped_xs, ys_: y})
            print("After %d training steps, loss on training batch is %f" %
                  (i, loss_value))
        saver.save(sess, os.path.join(MODEL_SAVE_PATH, PRE_MODEL_NAME))
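
# For context, a minimal sketch of how the 'losses' collection read by
# tf.add_n(tf.get_collection('losses')) is typically filled inside a layer
# of DTN.inference. The variable names and layer shape are assumptions,
# not the actual DTN code.
def _example_regularized_layer(input_tensor, in_dim, out_dim, regularizer):
    weights = tf.get_variable(
        'weights', [in_dim, out_dim],
        initializer=tf.truncated_normal_initializer(stddev=0.1))
    biases = tf.get_variable(
        'biases', [out_dim], initializer=tf.constant_initializer(0.0))
    if regularizer is not None:
        # Each layer contributes its L2 penalty to the shared collection,
        # which the training loss then sums up.
        tf.add_to_collection('losses', regularizer(weights))
    return tf.nn.relu(tf.matmul(input_tensor, weights) + biases)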
xs = tf.placeholder(
    tf.float32,
    [BATCH_SIZE, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE, DTN.NUM_CHANNELS],
    name='source-x-input')
xt = tf.placeholder(
    tf.float32,
    [BATCH_SIZE, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE, DTN.NUM_CHANNELS],
    name='target-x-input')
ys_ = tf.placeholder(
    tf.float32, [None, DTN.OUTPUT_NODE], name='source-y-input')

# trainable=False freezes the lower-layer parameters.
_, _, source_fc, ys = DTN.inference(
    xs, False, new_regularizer, reuse=False,
    trainable=False)  # False: without dropout
_, _, target_fc, yt = DTN.inference(
    xt, False, new_regularizer, reuse=True,
    trainable=False)  # pseudo logits: yt

# Source cross entropy against the ground-truth labels ys_.
# TODO: also include the labeled part of the target labels.
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
    logits=ys, labels=tf.argmax(ys_, 1))
cross_entropy_mean = tf.reduce_mean(cross_entropy)

# Marginal and conditional MMD between source and target fc features.
mmd_mar, mmd_con = con_MMD(source_fc, ys_, target_fc, yt)
loss = (cross_entropy_mean + theta1 * mmd_mar + theta2 * mmd_con +
        tf.add_n(tf.get_collection('losses')))
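
# con_MMD is defined elsewhere in the repo; the sketch below is only a
# plausible reading of it, assuming a linear-kernel MMD: the marginal term
# matches overall feature means, and the conditional term matches per-class
# means using ground-truth source labels and pseudo target labels. All
# names here are illustrative assumptions.
def _example_con_MMD(source_feat, source_labels, target_feat, target_logits,
                     num_classes=10):
    # Marginal MMD^2 with a linear kernel: squared distance between the
    # source and target feature means.
    mmd_mar = tf.reduce_sum(
        tf.square(tf.reduce_mean(source_feat, 0) -
                  tf.reduce_mean(target_feat, 0)))

    # Conditional MMD^2: per-class mean discrepancy, averaged over classes.
    # (A real implementation would guard against classes that are empty in
    # the current batch, which here would yield NaN.)
    s_cls = tf.argmax(source_labels, 1)
    t_cls = tf.argmax(target_logits, 1)  # pseudo labels for the target
    per_class = []
    for c in range(num_classes):
        s_c = tf.boolean_mask(source_feat, tf.equal(s_cls, c))
        t_c = tf.boolean_mask(target_feat, tf.equal(t_cls, c))
        per_class.append(tf.reduce_sum(
            tf.square(tf.reduce_mean(s_c, 0) - tf.reduce_mean(t_c, 0))))
    mmd_con = tf.add_n(per_class) / num_classes
    return mmd_mar, mmd_con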
xs = tf.placeholder(
    tf.float32,
    [source_domain_data.num_examples, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE,
     DTN.NUM_CHANNELS],
    name='source-x-input')
xt = tf.placeholder(
    tf.float32,
    [target_domain_data.num_examples, DTN.IMAGE_SIZE, DTN.IMAGE_SIZE,
     DTN.NUM_CHANNELS],
    name='target-x-input')
ys_ = tf.placeholder(
    tf.float32, [None, DTN.OUTPUT_NODE], name='source-y-input')
yt_ = tf.placeholder(
    tf.float32, [None, DTN.OUTPUT_NODE], name='target-y-input')

# trainable=False freezes the lower-layer parameters.
s_pool, s_fc1, s_fc2, ys = DTN.inference(
    xs, False, None, reuse=False, trainable=False)  # False: without dropout
t_pool, t_fc1, t_fc2, yt = DTN.inference(
    xt, False, None, reuse=True, trainable=False)  # pseudo logits: yt

# ys_: ground-truth source labels; yt: predicted (pseudo) target labels.
# Marginal and conditional MMD at each of the three feature layers.
pool_mmd_mar, pool_mmd_con = con_MMD(s_pool, ys_, t_pool, yt)
fc1_mmd_mar, fc1_mmd_con = con_MMD(s_fc1, ys_, t_fc1, yt)
fc2_mmd_mar, fc2_mmd_con = con_MMD(s_fc2, ys_, t_fc2, yt)

# TODO: learning rate decay
# Restore everything except variables whose name starts with 'step'.
variables = slim.get_variables_to_restore()
variables_to_restore = [
    v for v in variables if v.name.split('_')[0] != 'step'
]
saver = tf.train.Saver(variables_to_restore)
with tf.Session() as sess:
    # print(variables_to_restore)
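
# A hedged continuation sketch (an assumption, not the repo's code): the
# three layer-wise MMD pairs above would plausibly be combined into one
# objective the same way the fine-tuning loss earlier weights its terms:
#
#   mmd_mar = pool_mmd_mar + fc1_mmd_mar + fc2_mmd_mar
#   mmd_con = pool_mmd_con + fc1_mmd_con + fc2_mmd_con
#   loss = cross_entropy_mean + theta1 * mmd_mar + theta2 * mmd_con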