def testOneDimensionalCalibrationLambda(self):
  """Tests building the calibration layer from keypoint initializer fns.

  Instead of `keypoints_initializers`, the layer is given a pair of
  callables that produce the input/output keypoint tensors; the callables
  must accept (and ignore) whatever arguments the layer passes them.
  """
  with ops.Graph().as_default():
    num_keypoints = 10

    def kp_in_fn(*args, **kwargs):
      del args, kwargs  # Unused: initializer fns must accept any signature.
      return math_ops.linspace(0., 1., num_keypoints)

    def kp_out_fn(*args, **kwargs):
      del args, kwargs  # Unused: initializer fns must accept any signature.
      return math_ops.linspace(
          float(_DEFAULT_OUTPUT_MIN), float(_DEFAULT_OUTPUT_MAX),
          num_keypoints)

    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    calibrated, _, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            missing_input_value=0.21,
            num_keypoints=num_keypoints,
            bound=True,
            signal_name='test_one_dimensional_calibration_layer_lambda',
            keypoints_initializer_fns=(kp_in_fn, kp_out_fn)))
    # No regularizers were requested, so none should be returned.
    self.assertIsNone(regularization)
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [0.5], [250.])
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.2, 0.7], [220., 270.])
def testMissingFixedOutput(self):
  """Tests a fixed (non-learned) output value for missing inputs.

  Inputs equal to `missing_input_value` (-1.) must map to the fixed
  `missing_output_value` (7.), and the bound projection must not touch
  that fixed value even though it lies outside the output bounds.
  """
  with ops.Graph().as_default():
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    calibrated, projection, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_missing_fixed_output',
            keypoints_initializers=keypoints_init,
            bound=True,
            missing_input_value=-1.,
            missing_output_value=7.))
    # bound=True implies a projection op; no regularizers were requested.
    self.assertIsNotNone(projection)
    self.assertIsNone(regularization)
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      # Mix of missing and calibrated:
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [0.5, -1.],
                                                [250., 7.])
      # Only calibrated:
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.2, 0.7], [220., 270.])
      # Only missing:
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [-1., -1.],
                                                [7., 7.])
      # Projection shouldn't affect the missing output value, even though
      # it is outside the bounds.
      sess.run([projection])
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [-1., -1.],
                                                [7., 7.])
def testOneDimensionalCalibrationLambda(self):
  """Tests building the calibration layer from keypoint initializer fns.

  Instead of `keypoints_initializers`, the layer is given a pair of
  callables that produce the input/output keypoint tensors; the callables
  must accept (and ignore) whatever arguments the layer passes them.
  """
  with tf.Graph().as_default():
    num_keypoints = 10

    def kp_in_fn(*args, **kwargs):
      del args
      del kwargs
      return tf.linspace(0., 1., num_keypoints)

    def kp_out_fn(*args, **kwargs):
      del args
      del kwargs
      return tf.linspace(
          float(_DEFAULT_OUTPUT_MIN), float(_DEFAULT_OUTPUT_MAX),
          num_keypoints)

    uncalibrated = tf.compat.v1.placeholder(
        shape=tf.TensorShape([tf.compat.v1.Dimension(None)]),
        dtype=tf.float32)
    calibrated, _, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            missing_input_value=0.21,
            num_keypoints=num_keypoints,
            bound=True,
            signal_name='test_one_dimensional_calibration_layer_lambda',
            keypoints_initializer_fns=(kp_in_fn, kp_out_fn)))
    # No regularizers were requested, so none should be returned.
    self.assertIsNone(regularization)
    with self.session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [0.5], [250.])
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.2, 0.7], [220., 270.])
def testMonotonicity(self):
  """Tests that the projection op restores monotonicity of keypoint outputs."""
  # Create a monotonic calibration, then set it in a non-monotonic way and
  # check that it is projected back to monotonicity.
  with ops.Graph().as_default():
    num_keypoints = 5
    keypoints_init = keypoints_initialization.uniform_keypoints_for_signal(
        num_keypoints=num_keypoints,
        input_min=array_ops.constant(0.0, dtype=dtypes.float32),
        input_max=array_ops.constant(1.0, dtype=dtypes.float32),
        output_min=0.,
        output_max=1.,
        dtype=dtypes.float32)
    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    with variable_scope.variable_scope('test_monotonicity'):
      _, projection, regularization = (
          pwl_calibration_layers.one_dimensional_calibration_layer(
              uncalibrated,
              num_keypoints=num_keypoints,
              monotonic=1,
              signal_name='monotonic_x',
              keypoints_initializers=keypoints_init))
    # monotonic=1 implies a projection op; no regularizers were requested.
    self.assertIsNotNone(projection)
    self.assertIsNone(regularization)
    with self.test_session() as sess:
      # First initialize keypoints (and all variables)
      sess.run(variables.global_variables_initializer())
      kp_out = _get_variable_by_name(
          'test_monotonicity/pwl_calibration/monotonic_x_keypoints_outputs:0'
      )
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, [0.0, 0.25, 0.5, 0.75, 1.0])

      # Assign non_monotonic calibration.
      non_monotonic = [4., 5., 0., 4., -3.]
      sess.run(
          state_ops.assign(
              kp_out, array_ops.constant(non_monotonic,
                                         dtype=dtypes.float32)))
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, non_monotonic)

      # Execute projection.
      sess.run(projection)
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, [2., 2., 2., 2., 2.])
def testBoundness(self):
  """Tests that the projection op clips keypoint outputs back to bounds."""
  # Create a bound calibration, then set it outside the bounds and check
  # that it is projected back to the bounds.
  with ops.Graph().as_default():
    num_keypoints = 3
    keypoints_init = keypoints_initialization.uniform_keypoints_for_signal(
        num_keypoints=num_keypoints,
        input_min=array_ops.constant(0.0, dtype=dtypes.float32),
        input_max=array_ops.constant(1.0, dtype=dtypes.float32),
        output_min=0.,
        output_max=1.,
        dtype=dtypes.float32)
    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    with variable_scope.variable_scope('test_boundness'):
      _, projection, regularization = (
          pwl_calibration_layers.one_dimensional_calibration_layer(
              uncalibrated,
              num_keypoints=num_keypoints,
              bound=True,
              signal_name='bounded_x',
              keypoints_initializers=keypoints_init))
    # bound=True implies a projection op; no regularizers were requested.
    self.assertIsNotNone(projection)
    self.assertIsNone(regularization)
    with self.test_session() as sess:
      # First initialize keypoints (and all variables)
      sess.run(variables.global_variables_initializer())
      kp_out = _get_variable_by_name(
          'test_boundness/pwl_calibration/bounded_x_keypoints_outputs:0')
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, [0.0, 0.5, 1.0])

      # Assign values to variable beyond bounds.
      out_of_bounds = [-0.1, 1.2, 0.9]
      sess.run(
          state_ops.assign(
              kp_out, array_ops.constant(out_of_bounds,
                                         dtype=dtypes.float32)))
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, out_of_bounds)

      # Execute projection: only the out-of-bound entries get clipped.
      sess.run(projection)
      kp_out_values = sess.run(kp_out)
      self.assertAllClose(kp_out_values, [0.0, 1.0, 0.9])
def testMissingCalibratedOutput(self):
  """Tests the learned (calibrated) output for missing input values.

  With `missing_input_value` but no `missing_output_value`, the layer
  creates a trainable variable for the missing output; it is subject to
  the bound projection and receives a gradient of 1.0 for missing inputs.
  """
  with tf.Graph().as_default():
    # With calibration:
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = tf.compat.v1.placeholder(
        shape=tf.TensorShape([tf.compat.v1.Dimension(None)]),
        dtype=tf.float32)
    calibrated, projection, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_missing_calibrated_output',
            keypoints_initializers=keypoints_init,
            bound=True,
            missing_input_value=-1.))
    # bound=True implies a projection op; no regularizers were requested.
    self.assertIsNotNone(projection)
    self.assertIsNone(regularization)
    with self.session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.5, -1.],
          [250., _DEFAULT_OUTPUT_MIN])

      # Set out-of-bound value for missing value.
      missing_calibrated_output = _get_variable_by_name(
          'pwl_calibration/'
          'test_missing_calibrated_output_calibrated_missing_output:0')
      sess.run([tf.compat.v1.assign(missing_calibrated_output, 700.0)])
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [-1.], [700.])

      # Project to bound.
      sess.run(projection)
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [-1.], [_DEFAULT_OUTPUT_MAX])

      # Gradient wrt missing_calibrated_output should be 1.0
      d_calibrated_wrt_d_output = tf.gradients(calibrated,
                                               missing_calibrated_output)
      got = sess.run(
          d_calibrated_wrt_d_output, feed_dict={uncalibrated: [-1.]})
      self.assertAllClose(got, [1.])
def testOneDimensionalCalibrationLayer(self):
  """Tests basic calibration with uniform keypoints and no constraints."""
  with ops.Graph().as_default():
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    calibrated, projection, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_one_dimensional_calibration_layer',
            keypoints_initializers=keypoints_init))
    # No constraints (bound/monotonic) and no regularizers were requested,
    # so neither a projection op nor a regularization term should exist.
    self.assertIsNone(projection)
    self.assertIsNone(regularization)
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [0.5], [250.])
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.2, 0.7], [220., 270.])
def testOneDimensionalCalibrationLayer(self):
  """Tests basic calibration with uniform keypoints and no constraints."""
  with tf.Graph().as_default():
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = tf.compat.v1.placeholder(
        shape=tf.TensorShape([tf.compat.v1.Dimension(None)]),
        dtype=tf.float32)
    calibrated, projection, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_one_dimensional_calibration_layer',
            keypoints_initializers=keypoints_init))
    # No constraints (bound/monotonic) and no regularizers were requested,
    # so neither a projection op nor a regularization term should exist.
    self.assertIsNone(projection)
    self.assertIsNone(regularization)
    with self.session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      self._CheckOneDimensionalCalibrationLayer(sess, uncalibrated,
                                                calibrated, [0.5], [250.])
      self._CheckOneDimensionalCalibrationLayer(
          sess, uncalibrated, calibrated, [0.2, 0.7], [220., 270.])
def testOneDimensionalCalibrationLayerRegularization(self):
  """Tests the combined regularization term of the calibration layer."""
  with ops.Graph().as_default():
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = array_ops.placeholder(
        shape=tensor_shape.unknown_shape(ndims=1), dtype=dtypes.float32)
    # All four regularizers enabled with weight 1.0 each.
    _, _, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_one_dimensional_calibration_layer',
            l1_reg=1.0,
            l2_reg=1.0,
            l1_laplacian_reg=1.0,
            l2_laplacian_reg=1.0,
            keypoints_initializers=keypoints_init))
    with self.test_session() as sess:
      sess.run(variables.global_variables_initializer())
      got = sess.run(regularization)
      # NOTE(review): golden value, presumably computed from the uniform
      # keypoint outputs under the four regularizers — re-derive if the
      # initializer changes.
      expected_value = 638896.25
      self.assertAlmostEqual(got, expected_value, delta=1e-1)
def testOneDimensionalCalibrationLayerRegularization(self):
  """Tests the combined regularization term of the calibration layer."""
  with tf.Graph().as_default():
    num_keypoints = 10
    keypoints_init = self._UniformKeypoints(num_keypoints)
    uncalibrated = tf.compat.v1.placeholder(
        shape=tf.TensorShape([tf.compat.v1.Dimension(None)]),
        dtype=tf.float32)
    # All four regularizers enabled with weight 1.0 each.
    _, _, regularization = (
        pwl_calibration_layers.one_dimensional_calibration_layer(
            uncalibrated,
            num_keypoints=num_keypoints,
            signal_name='test_one_dimensional_calibration_layer',
            l1_reg=1.0,
            l2_reg=1.0,
            l1_laplacian_reg=1.0,
            l2_laplacian_reg=1.0,
            keypoints_initializers=keypoints_init))
    with self.session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      got = sess.run(regularization)
      # NOTE(review): golden value, presumably computed from the uniform
      # keypoint outputs under the four regularizers — re-derive if the
      # initializer changes.
      expected_value = 638896.25
      self.assertAlmostEqual(got, expected_value, delta=1e-1)