def testBatchNormScopeDoesHasIsTrainingWhenItsNotNone(self):
   """Batch-norm arg_scope keeps 'is_training' unless it is explicitly None."""
   # Same check for explicit False, explicit True, and the default value.
   for scope_kwargs in ({'is_training': False}, {'is_training': True}, {}):
     scope = mobilenet.training_scope(**scope_kwargs)
     batch_norm_args = scope[slim.arg_scope_func_key(slim.batch_norm)]
     self.assertIn('is_training', batch_norm_args)
Example #2
0
def training_scope(**kwargs):
  """Defines MobilenetV2 training scope.

  Usage:
     with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
       logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

  Args:
    **kwargs: Passed through to mobilenet.training_scope. The following
      parameters are supported:
        weight_decay: The weight decay to use for regularizing the model.
        stddev: Standard deviation for initialization; if negative, uses
          Xavier initialization.
        dropout_keep_prob: Dropout keep probability.
        bn_decay: Decay for the batch norm moving averages.

  Returns:
    An `arg_scope` to use for the mobilenet v2 model.
  """
  return lib.training_scope(**kwargs)
Example #3
0
def training_scope(**kwargs):
    """Defines MobilenetV2 training scope.

    Usage:
       with tf.contrib.slim.arg_scope(mobilenet_v2.training_scope()):
         logits, endpoints = mobilenet_v2.mobilenet(input_tensor)

    Args:
      **kwargs: Passed through to mobilenet.training_scope. The following
        parameters are supported:
          weight_decay: The weight decay to use for regularizing the model.
          stddev: Standard deviation for initialization; if negative, uses
            Xavier initialization.
          dropout_keep_prob: Dropout keep probability.
          bn_decay: Decay for the batch norm moving averages.

    Returns:
      An `arg_scope` to use for the mobilenet v2 model.
    """
    return lib.training_scope(**kwargs)
Example #4
0
 def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
     """is_training=None drops 'is_training' from the batch-norm arg_scope."""
     scope = mobilenet.training_scope(is_training=None)
     batch_norm_key = slim.arg_scope_func_key(slim.batch_norm)
     self.assertNotIn('is_training', scope[batch_norm_key])
Example #5
0
 def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
   """Passing is_training=None must omit 'is_training' from batch_norm args."""
   scope = mobilenet.training_scope(is_training=None)
   batch_norm_args = scope[slim.arg_scope_func_key(slim.batch_norm)]
   self.assertNotIn('is_training', batch_norm_args)