def test_return_non_default_batch_norm_params_keras_override(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      batch_norm {
        decay: 0.7
        center: false
        scale: true
        epsilon: 0.03
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)

        self.assertTrue(keras_config.use_batch_norm())
        batch_norm_params = keras_config.batch_norm_params(momentum=0.4)
        self.assertAlmostEqual(batch_norm_params['momentum'], 0.4)
        self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
        self.assertFalse(batch_norm_params['center'])
        self.assertTrue(batch_norm_params['scale'])
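
A minimal usage sketch (not from the original test file), assuming the
`keras_config` built above: the merged dict unpacks directly into a Keras
batch-normalization layer.

bn_params = keras_config.batch_norm_params(momentum=0.4)
bn_layer = tf.keras.layers.BatchNormalization(**bn_params)
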
def test_return_batch_norm_params_with_notrain_when_train_is_false(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
   batch_norm {
     decay: 0.7
     center: false
     scale: true
     epsilon: 0.03
     train: false
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
     self.assertEqual(conv_scope_arguments['normalizer_fn'],
                      slim.batch_norm)
     batch_norm_params = scope[_get_scope_key(slim.batch_norm)]
     self.assertAlmostEqual(batch_norm_params['decay'], 0.7)
     self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
     self.assertFalse(batch_norm_params['center'])
     self.assertTrue(batch_norm_params['scale'])
     self.assertFalse(batch_norm_params['is_training'])
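
For context, a hedged sketch of how a scope built this way is consumed
downstream; `inputs` is a hypothetical feature tensor, not part of the test.

with slim.arg_scope(scope_fn()):
  # conv2d now picks up normalizer_fn=slim.batch_norm and the batch-norm
  # arguments asserted above.
  net = slim.conv2d(inputs, 32, [3, 3])
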
def _build_arg_scope_with_conv_hyperparams(self):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.build(conv_hyperparams, is_training=True)
def _build_conv_hyperparams(self):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def _build_fc_hyperparams(self, op_type=hyperparams_pb2.Hyperparams.FC):
     hyperparams = hyperparams_pb2.Hyperparams()
     hyperparams_text_proto = """
   activation: NONE
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     text_format.Merge(hyperparams_text_proto, hyperparams)
     hyperparams.op = op_type
     return hyperparams_builder.KerasLayerHyperparams(hyperparams)
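
A hedged sketch of applying the helper's output; the layer width (128) is
illustrative. params() supplies kernel_regularizer, kernel_initializer, and
activation entries that unpack into a Keras layer.

fc_hyperparams = self._build_fc_hyperparams()
dense = tf.keras.layers.Dense(128, **fc_hyperparams.params())
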
def test_default_arg_scope_has_conv2d_transpose_op(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l1_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     self.assertTrue(_get_scope_key(slim.conv2d_transpose) in scope)
def _build_conv_arg_scope_no_batch_norm(self):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   activation: RELU_6
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     random_normal_initializer {
       stddev: 0.01
       mean: 0.0
     }
   }
 """
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.build(conv_hyperparams, is_training=True)
def _build_arg_scope_with_hyperparams(
    self, op_type=hyperparams_pb2.Hyperparams.FC):
     hyperparams = hyperparams_pb2.Hyperparams()
     hyperparams_text_proto = """
   activation: NONE
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     text_format.Merge(hyperparams_text_proto, hyperparams)
     hyperparams.op = op_type
     return hyperparams_builder.build(hyperparams, is_training=True)
def test_do_not_use_batch_norm_if_default(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
     self.assertEqual(conv_scope_arguments['normalizer_fn'], None)
def test_explicit_fc_op_arg_scope_has_fully_connected_op(self):
     conv_hyperparams_text_proto = """
   op: FC
   regularizer {
     l1_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     self.assertTrue(_get_scope_key(slim.fully_connected) in scope)
def test_override_activation_keras(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
   activation: RELU_6
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     keras_config = hyperparams_builder.KerasLayerHyperparams(
         conv_hyperparams_proto)
     new_params = keras_config.params(activation=tf.nn.relu)
     self.assertEqual(new_params['activation'], tf.nn.relu)
def test_use_relu_6_activation(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
   activation: RELU_6
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
     self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu6)
def test_separable_conv2d_and_conv2d_and_transpose_have_same_parameters(
    self):
     conv_hyperparams_text_proto = """
   regularizer {
     l1_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     kwargs_1, kwargs_2, kwargs_3 = scope.values()
     self.assertDictEqual(kwargs_1, kwargs_2)
     self.assertDictEqual(kwargs_1, kwargs_3)
def test_do_not_use_batch_norm_if_default_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        self.assertFalse(keras_config.use_batch_norm())
        self.assertEqual(keras_config.batch_norm_params(), {})

        # The batch norm builder should build an identity Lambda layer
        identity_layer = keras_config.build_batch_norm()
        self.assertTrue(isinstance(identity_layer, tf.keras.layers.Lambda))
def _build_conv_hyperparams(self):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   activation: RELU_6
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
   batch_norm {
     train: true,
     scale: false,
     center: true,
     decay: 0.2,
     epsilon: 0.1,
   }
 """
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
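
A hedged sketch of the usual consumption pattern for a config like this one
(layer sizes are illustrative): convolution kwargs come from params(), while
batch norm and activation are built as separate Keras layers.

hyperparams = self._build_conv_hyperparams()
conv = tf.keras.layers.Conv2D(64, 3, padding='same',
                              **hyperparams.params(use_bias=False))
batch_norm = hyperparams.build_batch_norm(training=True)
activation = hyperparams.build_activation_layer()
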
def test_return_l2_regularizer_weights_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
          weight: 0.42
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)

        regularizer = keras_config.params()['kernel_regularizer']
        weights = np.array([1., -1, 4., 2.])
        with self.test_session() as sess:
            result = sess.run(regularizer(tf.constant(weights)))
        self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
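
Sanity check on the expected value: the slim-style L2 term is
weight * sum(w**2) / 2 = 0.42 * (1 + 1 + 16 + 4) / 2 = 4.62 for these weights.
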
def test_variance_in_range_with_random_normal_initializer_keras(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     random_normal_initializer {
       mean: 0.0
       stddev: 0.8
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     keras_config = hyperparams_builder.KerasLayerHyperparams(
         conv_hyperparams_proto)
     initializer = keras_config.params()['kernel_initializer']
     self._assert_variance_in_range(initializer,
                                    shape=[100, 40],
                                    variance=0.64,
                                    tol=1e-1)
def test_use_none_activation_keras(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
   activation: NONE
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     keras_config = hyperparams_builder.KerasLayerHyperparams(
         conv_hyperparams_proto)
     self.assertEqual(keras_config.params()['activation'], None)
     self.assertEqual(
         keras_config.params(include_activation=True)['activation'], None)
     activation_layer = keras_config.build_activation_layer()
     self.assertTrue(isinstance(activation_layer, tf.keras.layers.Lambda))
     self.assertEqual(activation_layer.function, tf.identity)
def _build_conv_hyperparams(self, add_batch_norm=True):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   activation: RELU_6
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     if add_batch_norm:
         batch_norm_proto = """
     batch_norm {
       scale: false
     }
   """
         conv_hyperparams_text_proto += batch_norm_proto
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
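
A hedged sketch exercising the add_batch_norm switch; the assertions mirror
the style of the surrounding tests.

with_bn = self._build_conv_hyperparams(add_batch_norm=True)
without_bn = self._build_conv_hyperparams(add_batch_norm=False)
self.assertTrue(with_bn.use_batch_norm())
self.assertFalse(without_bn.use_batch_norm())
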
def _build_conv_hyperparams(self):
     conv_hyperparams = hyperparams_pb2.Hyperparams()
     conv_hyperparams_text_proto = """
   activation: RELU_6,
   regularizer {
     l2_regularizer {
       weight: 0.0004
     }
   }
   initializer {
     truncated_normal_initializer {
       stddev: 0.03
       mean: 0.0
     }
   }
   batch_norm {
     scale: true,
     decay: 0.997,
     epsilon: 0.001,
   }
 """
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
     return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def test_return_l1_regularized_weights(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l1_regularizer {
       weight: 0.5
     }
   }
   initializer {
     truncated_normal_initializer {
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
      conv_scope_arguments = list(scope.values())[0]  # dict views aren't indexable in Python 3
     regularizer = conv_scope_arguments['weights_regularizer']
     weights = np.array([1., -1, 4., 2.])
     with self.test_session() as sess:
         result = sess.run(regularizer(tf.constant(weights)))
     self.assertAllClose(np.abs(weights).sum() * 0.5, result)
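
Sanity check on the expected value: the L1 term is
weight * sum(|w|) = 0.5 * (1 + 1 + 4 + 2) = 4.0 for these weights.
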
def test_variance_in_range_with_variance_scaling_initializer_uniform_keras(
    self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     variance_scaling_initializer {
       factor: 2.0
       mode: FAN_IN
       uniform: true
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     keras_config = hyperparams_builder.KerasLayerHyperparams(
         conv_hyperparams_proto)
     initializer = keras_config.params()['kernel_initializer']
     self._assert_variance_in_range(initializer,
                                    shape=[100, 40],
                                    variance=2. / 100.)
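
Why 2. / 100.: with factor 2.0 and mode FAN_IN, the variance-scaling
initializer targets variance factor / fan_in, and a [100, 40] weight matrix
has a fan-in of 100.
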
def test_return_l2_regularizer_weights(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
          weight: 0.42
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                             is_training=True)
        scope = scope_fn()
        conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]

        regularizer = conv_scope_arguments['weights_regularizer']
        weights = np.array([1., -1, 4., 2.])
        with self.test_session() as sess:
            result = sess.run(regularizer(tf.constant(weights)))
        self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
def test_variance_in_range_with_random_normal_initializer(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     random_normal_initializer {
       mean: 0.0
       stddev: 0.8
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
     initializer = conv_scope_arguments['weights_initializer']
     self._assert_variance_in_range(initializer,
                                    shape=[100, 40],
                                    variance=0.64,
                                    tol=1e-1)
def test_variance_in_range_with_variance_scaling_initializer_uniform(self):
     conv_hyperparams_text_proto = """
   regularizer {
     l2_regularizer {
     }
   }
   initializer {
     variance_scaling_initializer {
       factor: 2.0
       mode: FAN_IN
       uniform: true
     }
   }
 """
     conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
     text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
     scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
                                          is_training=True)
     scope = scope_fn()
     conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
     initializer = conv_scope_arguments['weights_initializer']
     self._assert_variance_in_range(initializer,
                                    shape=[100, 40],
                                    variance=2. / 100.)