def test_onehot():
    """Sanity-check modeling.onehot: each row becomes a one-hot vector.

    Uses the module-level `sess` / `init()` test harness — presumably a
    shared TF session; verify against the rest of the test file.
    """
    labels = tf.constant(np.array([[1], [3]], dtype='float32'))
    encoded_t = modeling.onehot(labels, 10)
    init()
    encoded = sess.run(encoded_t)
    # Two input rows, depth 10.
    assert encoded.shape == (2, 10)
    # The hot index of each row matches its label.
    assert encoded[0][1] == 1
    assert encoded[1][3] == 1
    # Exactly one hot entry per row, two overall.
    assert np.sum(encoded[0]) == 1
    assert np.sum(encoded) == 2
def cost(y, logits, regularize=False, l2_weight=0.01):
    """Build the scalar training-cost op.

    Computes the mean softmax cross-entropy between `logits` and the
    one-hot encoding of `y`, optionally adding an L2 weight penalty.

    Args:
        y: class labels, one-hot encoded internally over NUM_CLASSES.
        logits: unscaled model outputs; assumed shape
            (batch, NUM_CLASSES) — TODO confirm against the model.
        regularize: if True, add `modeling.l2_penalty(l2_weight)`.
        l2_weight: L2 penalty weight (only used when `regularize`).

    Returns:
        A scalar cost tensor, also recorded via a 'cost' summary.
    """
    with tf.name_scope('cost'):
        with tf.name_scope('xentropy'):
            # Positional (logits, labels) order matches the legacy TF API
            # this file targets (the tf.scalar_summary era).
            xentropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
                logits, tf.cast(modeling.onehot(y, NUM_CLASSES), 'float')))
        # Single regularization branch (original checked `regularize`
        # twice and computed an unused `batch_size` local).
        if regularize:
            tot_cost = tf.add(xentropy, modeling.l2_penalty(l2_weight))
        else:
            tot_cost = xentropy
        tf.scalar_summary('cost', tot_cost)
        return tot_cost