import numpy as np
import tensorflow as tf

from google.protobuf import text_format

from object_detection.builders import hyperparams_builder
from object_detection.protos import hyperparams_pb2


# Test class inferred from the snippet's self-based methods; the name follows
# object_detection's hyperparams_builder_test.
class KerasHyperparamsBuilderTest(tf.test.TestCase):

    def test_return_non_default_batch_norm_params_keras_override(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      batch_norm {
        decay: 0.7
        center: false
        scale: true
        epsilon: 0.03
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)

        self.assertTrue(keras_config.use_batch_norm())
        batch_norm_params = keras_config.batch_norm_params(momentum=0.4)
        self.assertAlmostEqual(batch_norm_params['momentum'], 0.4)
        self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
        self.assertFalse(batch_norm_params['center'])
        self.assertTrue(batch_norm_params['scale'])
    def _build_conv_hyperparams(self):
        conv_hyperparams = hyperparams_pb2.Hyperparams()
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
        return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
    # Renamed from _build_conv_hyperparams to avoid shadowing the plain builder
    # above; this variant also sets an activation and batch norm options.
    def _build_conv_hyperparams_with_batch_norm(self):
        conv_hyperparams = hyperparams_pb2.Hyperparams()
        conv_hyperparams_text_proto = """
      activation: RELU_6
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      batch_norm {
        scale: false
      }
    """
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
        return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
    def test_override_activation_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      activation: RELU_6
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        new_params = keras_config.params(activation=tf.nn.relu)
        self.assertEqual(new_params['activation'], tf.nn.relu)
    def test_do_not_use_batch_norm_if_default_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        self.assertFalse(keras_config.use_batch_norm())
        self.assertEqual(keras_config.batch_norm_params(), {})

        # The batch norm builder should build an identity Lambda layer
        identity_layer = keras_config.build_batch_norm()
        self.assertTrue(isinstance(identity_layer, tf.keras.layers.Lambda))
    def test_variance_in_range_with_random_normal_initializer_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        random_normal_initializer {
          mean: 0.0
          stddev: 0.8
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        initializer = keras_config.params()['kernel_initializer']
        self._assert_variance_in_range(initializer,
                                       shape=[100, 40],
                                       variance=0.64,
                                       tol=1e-1)
    def test_use_none_activation_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
      activation: NONE
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        self.assertEqual(keras_config.params()['activation'], None)
        self.assertEqual(
            keras_config.params(include_activation=True)['activation'], None)
        activation_layer = keras_config.build_activation_layer()
        self.assertTrue(isinstance(activation_layer, tf.keras.layers.Lambda))
        self.assertEqual(activation_layer.function, tf.identity)
    def test_return_l2_regularizer_weights_keras(self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
          weight: 0.42
        }
      }
      initializer {
        truncated_normal_initializer {
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)

        regularizer = keras_config.params()['kernel_regularizer']
        weights = np.array([1., -1, 4., 2.])
        with self.test_session() as sess:
            result = sess.run(regularizer(tf.constant(weights)))
        self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
    def test_variance_in_range_with_variance_scaling_initializer_uniform_keras(
            self):
        conv_hyperparams_text_proto = """
      regularizer {
        l2_regularizer {
        }
      }
      initializer {
        variance_scaling_initializer {
          factor: 2.0
          mode: FAN_IN
          uniform: true
        }
      }
    """
        conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
        text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
        keras_config = hyperparams_builder.KerasLayerHyperparams(
            conv_hyperparams_proto)
        initializer = keras_config.params()['kernel_initializer']
        self._assert_variance_in_range(initializer,
                                       shape=[100, 40],
                                       variance=2. / 100.)
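
    # _assert_variance_in_range is called by the variance tests above but is
    # not part of this snippet. A minimal sketch, assuming a TF1-style
    # tf.test.TestCase: materialize a variable with the initializer and compare
    # the sample variance of its values against the expected value.
    def _assert_variance_in_range(self, initializer, shape, variance,
                                  tol=1e-2):
        with tf.Graph().as_default() as g:
            with self.test_session(graph=g) as sess:
                var = tf.get_variable(
                    name='test',
                    shape=shape,
                    dtype=tf.float32,
                    initializer=initializer)
                sess.run(tf.global_variables_initializer())
                values = sess.run(var)
                self.assertAllClose(np.var(values), variance, tol, tol)


# SSD_FEATURE_EXTRACTOR_CLASS_MAP and SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP
# below are assumed to be module-level registries mapping feature extractor
# type names to classes, as in object_detection's model_builder; they are not
# defined in this snippet.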
def _build_ssd_feature_extractor(feature_extractor_config,
                                 is_training,
                                 freeze_batchnorm,
                                 reuse_weights=None):
    """Builds a ssd_meta_arch.SSDFeatureExtractor based on config.

  Args:
    feature_extractor_config: A SSDFeatureExtractor proto config from ssd.proto.
    is_training: True if this feature extractor is being built for training.
    freeze_batchnorm: Whether to freeze batch norm parameters during
      training or not. When training with a small batch size (e.g. 1), it is
      desirable to freeze batch norm update and use pretrained batch norm
      params.
    reuse_weights: if the feature extractor should reuse weights.

  Returns:
    ssd_meta_arch.SSDFeatureExtractor based on config.

  Raises:
    ValueError: On invalid feature extractor type.
  """
    feature_type = feature_extractor_config.type
    is_keras_extractor = feature_type in SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP
    depth_multiplier = feature_extractor_config.depth_multiplier
    min_depth = feature_extractor_config.min_depth
    pad_to_multiple = feature_extractor_config.pad_to_multiple
    use_explicit_padding = feature_extractor_config.use_explicit_padding
    use_depthwise = feature_extractor_config.use_depthwise

    if is_keras_extractor:
        conv_hyperparams = hyperparams_builder.KerasLayerHyperparams(
            feature_extractor_config.conv_hyperparams)
    else:
        conv_hyperparams = hyperparams_builder.build(
            feature_extractor_config.conv_hyperparams, is_training)
    override_base_feature_extractor_hyperparams = (
        feature_extractor_config.override_base_feature_extractor_hyperparams)

    if (feature_type not in SSD_FEATURE_EXTRACTOR_CLASS_MAP) and (
            not is_keras_extractor):
        raise ValueError(
            'Unknown ssd feature_extractor: {}'.format(feature_type))

    if is_keras_extractor:
        feature_extractor_class = SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP[
            feature_type]
    else:
        feature_extractor_class = SSD_FEATURE_EXTRACTOR_CLASS_MAP[feature_type]
    kwargs = {
        'is_training': is_training,
        'depth_multiplier': depth_multiplier,
        'min_depth': min_depth,
        'pad_to_multiple': pad_to_multiple,
        'use_explicit_padding': use_explicit_padding,
        'use_depthwise': use_depthwise,
        'override_base_feature_extractor_hyperparams':
            override_base_feature_extractor_hyperparams,
    }

    if is_keras_extractor:
        kwargs.update({
            'conv_hyperparams': conv_hyperparams,
            'inplace_batchnorm_update': False,
            'freeze_batchnorm': freeze_batchnorm
        })
    else:
        kwargs.update({
            'conv_hyperparams_fn': conv_hyperparams,
            'reuse_weights': reuse_weights,
        })

    if feature_extractor_config.HasField('fpn'):
        kwargs.update({
            'fpn_min_level': feature_extractor_config.fpn.min_level,
            'fpn_max_level': feature_extractor_config.fpn.max_level,
            'additional_layer_depth':
                feature_extractor_config.fpn.additional_layer_depth,
        })

    return feature_extractor_class(**kwargs)
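

# A minimal usage sketch for _build_ssd_feature_extractor. The extractor type
# and config values below are illustrative assumptions, not taken from the code
# above; the call only succeeds if the named type is registered in
# SSD_FEATURE_EXTRACTOR_CLASS_MAP (as 'ssd_mobilenet_v2' is in
# object_detection's model_builder).
def _example_build_ssd_feature_extractor():
    from object_detection.protos import ssd_pb2

    feature_extractor_config = ssd_pb2.SsdFeatureExtractor()
    text_format.Merge("""
      type: 'ssd_mobilenet_v2'
      depth_multiplier: 1.0
      min_depth: 16
      conv_hyperparams {
        regularizer { l2_regularizer { } }
        initializer { truncated_normal_initializer { } }
      }
    """, feature_extractor_config)
    # Build in training mode with batch norm updates enabled.
    return _build_ssd_feature_extractor(feature_extractor_config,
                                        is_training=True,
                                        freeze_batchnorm=False)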