Example #1
def batch_norm(inputs,
               decay=0.999,
               center=True,
               scale=False,
               epsilon=0.001,
               activation_fn=None,
               param_initializers=None,
               param_regularizers=None,
               updates_collections=ops.GraphKeys.UPDATE_OPS,
               is_training=True,
               reuse=None,
               variables_collections=None,
               outputs_collections=None,
               trainable=True,
               batch_weights=None,
               fused=False,
               data_format=DATA_FORMAT_NHWC,
               zero_debias_moving_mean=False,
               scope=None,
               renorm=False,
               renorm_clipping=None,
               renorm_decay=0.99):
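    """Batch-norm wrapper with a tf.contrib.layers.batch_norm-style signature.

    Maps the legacy arguments (decay, is_training, renorm_decay) onto a
    core-layers-style batch normalization call. Several legacy arguments
    (activation_fn, updates_collections, variables_collections,
    outputs_collections, batch_weights, zero_debias_moving_mean) are accepted
    for signature compatibility but are not used by this implementation.
    """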
    scope = _default_scope(scope, 'BATCH_NORM', 'BatchNorm')
    if data_format not in (DATA_FORMAT_NCHW, DATA_FORMAT_NHWC):
        raise ValueError('data_format has to be either NCHW or NHWC.')
    axis = 1 if data_format == DATA_FORMAT_NCHW else -1

    with vs.variable_scope(scope, reuse=reuse) as sc:
        if not param_initializers:
            param_initializers = {}
        beta_initializer = param_initializers.get('beta', init_ops.zeros_initializer())
        gamma_initializer = param_initializers.get('gamma', init_ops.ones_initializer())
        moving_mean_initializer = param_initializers.get('moving_mean', init_ops.zeros_initializer())
        moving_variance_initializer = param_initializers.get('moving_variance', init_ops.ones_initializer())

        if not param_regularizers:
            param_regularizers = {}

        beta_regularizer = param_regularizers.get('beta')
        gamma_regularizer = param_regularizers.get('gamma')

        return layers.batch_norm(
            inputs=inputs,
            axis=axis,
            momentum=decay,
            epsilon=epsilon,
            center=center,
            scale=scale,
            beta_initializer=beta_initializer,
            gamma_initializer=gamma_initializer,
            moving_mean_initializer=moving_mean_initializer,
            moving_variance_initializer=moving_variance_initializer,
            beta_regularizer=beta_regularizer,
            gamma_regularizer=gamma_regularizer,
            trainable=trainable,
            renorm=renorm,
            renorm_clipping=renorm_clipping,
            renorm_momentum=renorm_decay,
            fused=fused,
            training=is_training)
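A minimal call sketch for the wrapper above (hypothetical tensor and scope names; the module-level imports and the DATA_FORMAT_NHWC constant are assumed to already be in scope):

# Hypothetical usage: `images` is an illustrative 4-D float tensor and
# `is_training` a Python bool or boolean tensor supplied by the caller.
normalized = batch_norm(images,
                        decay=0.997,
                        scale=True,
                        is_training=is_training,
                        data_format=DATA_FORMAT_NHWC,
                        scope='conv1_bn')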
Example #2
 def __init__(self,
              axis=-1,
              momentum=0.99,
              epsilon=1e-3,
              center=True,
              scale=True,
              beta_initializer=init_ops.zeros_initializer(),
              gamma_initializer=init_ops.ones_initializer(),
              moving_mean_initializer=init_ops.zeros_initializer(),
              moving_variance_initializer=init_ops.ones_initializer(),
              beta_regularizer=None,
              gamma_regularizer=None,
              renorm=False,
              renorm_clipping=None,
              renorm_momentum=0.99,
              fused=False,
              trainable=True,
              name=None,
              **kwargs):
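     """Stores the batch-normalization configuration on the layer instance.

     In this implementation, fused batch norm requires both center=True and
     scale=True, and batch renormalization (renorm=True) is rejected;
     renorm_clipping and renorm_momentum are accepted but not retained.
     """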
     super(BatchNormalization, self).__init__(trainable=trainable,
                                              name=name,
                                              **kwargs)
     self.axis = axis
     self.momentum = momentum
     self.epsilon = epsilon
     self.center = center
     self.scale = scale
     self.beta_initializer = beta_initializer
     self.gamma_initializer = gamma_initializer
     self.moving_mean_initializer = moving_mean_initializer
     self.moving_variance_initializer = moving_variance_initializer
     self.beta_regularizer = beta_regularizer
     self.gamma_regularizer = gamma_regularizer
     self.renorm = renorm
     self.fused = fused
     self.trainable = trainable
     if fused:
         if not center or not scale:
             raise ValueError(
                 'fused norm requires both center and scale set to be True.'
             )
     if renorm:
         raise ValueError('renorm is currently not supported.')
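A brief construction sketch for the layer class above (hypothetical input tensor; the apply(inputs, training=...) call mirrors the functional wrapper in the next example):

# Hypothetical usage: build the layer, then apply it to a float tensor `inputs`.
bn = BatchNormalization(axis=-1, momentum=0.9, epsilon=1e-3)
outputs = bn.apply(inputs, training=True)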
Example #3
def batch_normalization(
        inputs,
        axis=-1,
        momentum=0.99,
        epsilon=1e-3,
        center=True,
        scale=True,
        beta_initializer=init_ops.zeros_initializer(),
        gamma_initializer=init_ops.ones_initializer(),
        moving_mean_initializer=init_ops.zeros_initializer(),
        moving_variance_initializer=init_ops.ones_initializer(),
        beta_regularizer=None,
        gamma_regularizer=None,
        training=False,
        trainable=True,
        name=None,
        reuse=None,
        renorm=False,
        renorm_clipping=None,
        renorm_momentum=0.99,
        fused=False):
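    """Functional interface: builds a BatchNormalization layer from the given
    arguments and applies it to `inputs`, returning the normalized tensor.
    """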
    layer = BatchNormalization(
        axis=axis,
        momentum=momentum,
        epsilon=epsilon,
        center=center,
        scale=scale,
        beta_initializer=beta_initializer,
        gamma_initializer=gamma_initializer,
        moving_mean_initializer=moving_mean_initializer,
        moving_variance_initializer=moving_variance_initializer,
        beta_regularizer=beta_regularizer,
        gamma_regularizer=gamma_regularizer,
        renorm=renorm,
        renorm_clipping=renorm_clipping,
        renorm_momentum=renorm_momentum,
        fused=fused,
        trainable=trainable,
        name=name,
        _reuse=reuse,
        _scope=name)
    return layer.apply(inputs, training=training)
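A minimal usage sketch for the functional interface above (hypothetical tensor names; note that in graph-mode TensorFlow the moving-average update ops created by batch normalization typically have to be run explicitly, e.g. via the UPDATE_OPS collection, though whether that applies here depends on the surrounding framework):

# Hypothetical usage: `images` is an illustrative 4-D float tensor and
# `is_training` a boolean placeholder or Python bool.
net = batch_normalization(images,
                          axis=-1,
                          momentum=0.9,
                          training=is_training,
                          name='bn1')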