def testReuseUpdateOps(self):
  # Reusing the same scope should still add a new pair of update ops.
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    ops.batch_norm(images, scope='bn')
    self.assertEquals(len(tf.get_collection(ops.UPDATE_OPS_COLLECTION)), 2)
    ops.batch_norm(images, scope='bn', reuse=True)
    self.assertEquals(len(tf.get_collection(ops.UPDATE_OPS_COLLECTION)), 4)
def testMovingAverageVariables(self):
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    ops.batch_norm(images, scale=True)
    moving_mean = tf.moving_average_variables()[0]
    moving_variance = tf.moving_average_variables()[1]
    self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
    self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance')
def testReuseVariables(self):
  # With reuse=True the second call shares beta, gamma and the moving vars.
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    ops.batch_norm(images, scale=True, scope='bn')
    ops.batch_norm(images, scale=True, scope='bn', reuse=True)
    beta = variables.get_variables_by_name('beta')
    gamma = variables.get_variables_by_name('gamma')
    self.assertEquals(len(beta), 1)
    self.assertEquals(len(gamma), 1)
    moving_vars = tf.get_collection('moving_vars')
    self.assertEquals(len(moving_vars), 2)
def testUpdateOps(self):
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    ops.batch_norm(images)
    update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
    update_moving_mean = update_ops[0]
    update_moving_variance = update_ops[1]
    self.assertEquals(update_moving_mean.op.name,
                      'BatchNorm/AssignMovingAvg')
    self.assertEquals(update_moving_variance.op.name,
                      'BatchNorm/AssignMovingAvg_1')
def testCreateVariablesWithoutCenterWithoutScale(self):
  # Without center/scale no beta or gamma is created, but the moving
  # mean and variance still are.
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    ops.batch_norm(images, center=False, scale=False)
    beta = variables.get_variables_by_name('beta')
    self.assertEquals(beta, [])
    gamma = variables.get_variables_by_name('gamma')
    self.assertEquals(gamma, [])
    moving_mean = tf.moving_average_variables()[0]
    moving_variance = tf.moving_average_variables()[1]
    self.assertEquals(moving_mean.op.name, 'BatchNorm/moving_mean')
    self.assertEquals(moving_variance.op.name, 'BatchNorm/moving_variance')
def testReuseVars(self):
  height, width = 3, 3
  with self.test_session() as sess:
    image_shape = (10, height, width, 3)
    image_values = np.random.rand(*image_shape)
    expected_mean = np.mean(image_values, axis=(0, 1, 2))
    expected_var = np.var(image_values, axis=(0, 1, 2))
    images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
    output = ops.batch_norm(images, decay=0.1, is_training=False)
    update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
    with tf.control_dependencies(update_ops):
      barrier = tf.no_op(name='gradient_barrier')
      output = control_flow_ops.with_dependencies([barrier], output)
    # Initialize all variables.
    sess.run(tf.initialize_all_variables())
    moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
    moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
    mean, variance = sess.run([moving_mean, moving_variance])
    # After initialization moving_mean == 0 and moving_variance == 1.
    self.assertAllClose(mean, [0] * 3)
    self.assertAllClose(variance, [1] * 3)
    # Simulate assignment from saver restore.
    init_assigns = [tf.assign(moving_mean, expected_mean),
                    tf.assign(moving_variance, expected_var)]
    sess.run(init_assigns)
    for _ in range(10):
      sess.run([output], {images: np.random.rand(*image_shape)})
    mean = moving_mean.eval()
    variance = moving_variance.eval()
    # Although we feed different images, the moving_mean and moving_variance
    # shouldn't change.
    self.assertAllClose(mean, expected_mean)
    self.assertAllClose(variance, expected_var)
def testComputeMovingVars(self):
  height, width = 3, 3
  with self.test_session() as sess:
    image_shape = (10, height, width, 3)
    image_values = np.random.rand(*image_shape)
    expected_mean = np.mean(image_values, axis=(0, 1, 2))
    expected_var = np.var(image_values, axis=(0, 1, 2))
    images = tf.constant(image_values, shape=image_shape, dtype=tf.float32)
    output = ops.batch_norm(images, decay=0.1)
    update_ops = tf.get_collection(ops.UPDATE_OPS_COLLECTION)
    with tf.control_dependencies(update_ops):
      barrier = tf.no_op(name='gradient_barrier')
      output = control_flow_ops.with_dependencies([barrier], output)
    # Initialize all variables.
    sess.run(tf.initialize_all_variables())
    moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
    moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
    mean, variance = sess.run([moving_mean, moving_variance])
    # After initialization moving_mean == 0 and moving_variance == 1.
    self.assertAllClose(mean, [0] * 3)
    self.assertAllClose(variance, [1] * 3)
    for _ in range(10):
      sess.run([output])
    mean = moving_mean.eval()
    variance = moving_variance.eval()
    # After 10 updates with decay 0.1 moving_mean == expected_mean and
    # moving_variance == expected_var.
    self.assertAllClose(mean, expected_mean)
    self.assertAllClose(variance, expected_var)
def testCreateOp(self):
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    output = ops.batch_norm(images)
    self.assertTrue(output.op.name.startswith('BatchNorm/batchnorm'))
    self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testCreateMovingVars(self):
  # The moving mean and variance are added to the requested collection.
  height, width = 3, 3
  with self.test_session():
    images = tf.random_uniform((5, height, width, 3), seed=1)
    _ = ops.batch_norm(images, moving_vars='moving_vars')
    moving_mean = tf.get_collection('moving_vars',
                                    'BatchNorm/moving_mean')
    self.assertEquals(len(moving_mean), 1)
    self.assertEquals(moving_mean[0].op.name, 'BatchNorm/moving_mean')
    moving_variance = tf.get_collection('moving_vars',
                                        'BatchNorm/moving_variance')
    self.assertEquals(len(moving_variance), 1)
    self.assertEquals(moving_variance[0].op.name, 'BatchNorm/moving_variance')